Revert "Wires up WebrtcKeyValueBasedConfig in media engines."
This reverts commit 591b2ab82ead157b5f5a85d5082bd15fe8c51809. Reason for revert: Breaks downstream project Original change's description: > Wires up WebrtcKeyValueBasedConfig in media engines. > > This replaces field_trial:: -based functions from system_wrappers. > Field trials are still used as fallback, but injectable trials are now > possible. > > Bug: webrtc:11926 > Change-Id: I70f28c4fbabf6d9e55052342000e38612b46682c > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/174261 > Reviewed-by: Per Kjellander <perkj@webrtc.org> > Reviewed-by: Kári Helgason <kthelgason@webrtc.org> > Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org> > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > Reviewed-by: Sebastian Jansson <srte@webrtc.org> > Commit-Queue: Erik Språng <sprang@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#32129} TBR=mbonadei@webrtc.org,kthelgason@webrtc.org,sprang@webrtc.org,stefan@webrtc.org,srte@webrtc.org,perkj@webrtc.org Change-Id: I3e169149a8b787aa6366bb357abb71794534c63a No-Presubmit: true No-Tree-Checks: true No-Try: true Bug: webrtc:11926 Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/184507 Reviewed-by: Artem Titov <titovartem@webrtc.org> Commit-Queue: Artem Titov <titovartem@webrtc.org> Cr-Commit-Position: refs/heads/master@{#32132}
This commit is contained in:
parent
467b7c0ab2
commit
5956a17ed6
@ -55,7 +55,6 @@ if (!build_with_chromium) {
|
||||
"audio:audio_mixer_api",
|
||||
"audio_codecs:audio_codecs_api",
|
||||
"task_queue:default_task_queue_factory",
|
||||
"transport:field_trial_based_config",
|
||||
"video_codecs:video_codecs_api",
|
||||
]
|
||||
}
|
||||
|
||||
@ -18,7 +18,6 @@
|
||||
#include "api/rtc_event_log/rtc_event_log_factory.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/task_queue/default_task_queue_factory.h"
|
||||
#include "api/transport/field_trial_based_config.h"
|
||||
#include "media/base/media_engine.h"
|
||||
#include "media/engine/webrtc_media_engine.h"
|
||||
#include "modules/audio_device/include/audio_device.h"
|
||||
@ -46,7 +45,6 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
|
||||
dependencies.call_factory = CreateCallFactory();
|
||||
dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
|
||||
dependencies.task_queue_factory.get());
|
||||
dependencies.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
|
||||
|
||||
cricket::MediaEngineDependencies media_dependencies;
|
||||
media_dependencies.task_queue_factory = dependencies.task_queue_factory.get();
|
||||
@ -61,7 +59,6 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
|
||||
media_dependencies.audio_mixer = std::move(audio_mixer);
|
||||
media_dependencies.video_encoder_factory = std::move(video_encoder_factory);
|
||||
media_dependencies.video_decoder_factory = std::move(video_decoder_factory);
|
||||
media_dependencies.trials = dependencies.trials.get();
|
||||
dependencies.media_engine =
|
||||
cricket::CreateMediaEngine(std::move(media_dependencies));
|
||||
|
||||
|
||||
@ -260,8 +260,6 @@ class Call final : public webrtc::Call,
|
||||
|
||||
Stats GetStats() const override;
|
||||
|
||||
const WebRtcKeyValueConfig& trials() const override;
|
||||
|
||||
// Implements PacketReceiver.
|
||||
DeliveryStatus DeliverPacket(MediaType media_type,
|
||||
rtc::CopyOnWriteBuffer packet,
|
||||
@ -1114,10 +1112,6 @@ Call::Stats Call::GetStats() const {
|
||||
return stats;
|
||||
}
|
||||
|
||||
const WebRtcKeyValueConfig& Call::trials() const {
|
||||
return *config_.trials;
|
||||
}
|
||||
|
||||
void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
|
||||
RTC_DCHECK_RUN_ON(worker_thread_);
|
||||
switch (media) {
|
||||
|
||||
@ -156,8 +156,6 @@ class Call {
|
||||
virtual void SetClientBitratePreferences(
|
||||
const BitrateSettings& preferences) = 0;
|
||||
|
||||
virtual const WebRtcKeyValueConfig& trials() const = 0;
|
||||
|
||||
virtual ~Call() {}
|
||||
};
|
||||
|
||||
|
||||
@ -266,10 +266,6 @@ Call::Stats DegradedCall::GetStats() const {
|
||||
return call_->GetStats();
|
||||
}
|
||||
|
||||
const WebRtcKeyValueConfig& DegradedCall::trials() const {
|
||||
return call_->trials();
|
||||
}
|
||||
|
||||
void DegradedCall::SignalChannelNetworkState(MediaType media,
|
||||
NetworkState state) {
|
||||
call_->SignalChannelNetworkState(media, state);
|
||||
|
||||
@ -85,8 +85,6 @@ class DegradedCall : public Call, private PacketReceiver {
|
||||
|
||||
Stats GetStats() const override;
|
||||
|
||||
const WebRtcKeyValueConfig& trials() const override;
|
||||
|
||||
void SignalChannelNetworkState(MediaType media, NetworkState state) override;
|
||||
void OnAudioTransportOverheadChanged(
|
||||
int transport_overhead_per_packet) override;
|
||||
|
||||
@ -93,7 +93,6 @@ rtc_library("rtc_media_base") {
|
||||
"../api/crypto:frame_encryptor_interface",
|
||||
"../api/crypto:options",
|
||||
"../api/transport:stun_types",
|
||||
"../api/transport:webrtc_key_value_config",
|
||||
"../api/transport/rtp:rtp_source",
|
||||
"../api/video:video_bitrate_allocation",
|
||||
"../api/video:video_bitrate_allocator_factory",
|
||||
@ -288,8 +287,6 @@ rtc_library("rtc_audio_video") {
|
||||
"../api/audio_codecs:audio_codecs_api",
|
||||
"../api/task_queue",
|
||||
"../api/transport:bitrate_settings",
|
||||
"../api/transport:field_trial_based_config",
|
||||
"../api/transport:webrtc_key_value_config",
|
||||
"../api/transport/rtp:rtp_source",
|
||||
"../api/units:data_rate",
|
||||
"../api/video:video_bitrate_allocation",
|
||||
@ -327,6 +324,7 @@ rtc_library("rtc_audio_video") {
|
||||
"../rtc_base/system:rtc_export",
|
||||
"../rtc_base/third_party/base64",
|
||||
"../system_wrappers",
|
||||
"../system_wrappers:field_trial",
|
||||
"../system_wrappers:metrics",
|
||||
]
|
||||
absl_deps = [
|
||||
@ -458,7 +456,6 @@ if (rtc_include_tests) {
|
||||
"../api:call_api",
|
||||
"../api:fec_controller_api",
|
||||
"../api:scoped_refptr",
|
||||
"../api/transport:field_trial_based_config",
|
||||
"../api/video:encoded_image",
|
||||
"../api/video:video_bitrate_allocation",
|
||||
"../api/video:video_frame",
|
||||
|
||||
@ -161,20 +161,11 @@ webrtc::RTCError CheckRtpParametersInvalidModificationAndValues(
|
||||
}
|
||||
|
||||
CompositeMediaEngine::CompositeMediaEngine(
|
||||
std::unique_ptr<webrtc::WebRtcKeyValueConfig> trials,
|
||||
std::unique_ptr<VoiceEngineInterface> audio_engine,
|
||||
std::unique_ptr<VoiceEngineInterface> voice_engine,
|
||||
std::unique_ptr<VideoEngineInterface> video_engine)
|
||||
: trials_(std::move(trials)),
|
||||
voice_engine_(std::move(audio_engine)),
|
||||
: voice_engine_(std::move(voice_engine)),
|
||||
video_engine_(std::move(video_engine)) {}
|
||||
|
||||
CompositeMediaEngine::CompositeMediaEngine(
|
||||
std::unique_ptr<VoiceEngineInterface> audio_engine,
|
||||
std::unique_ptr<VideoEngineInterface> video_engine)
|
||||
: CompositeMediaEngine(nullptr,
|
||||
std::move(audio_engine),
|
||||
std::move(video_engine)) {}
|
||||
|
||||
CompositeMediaEngine::~CompositeMediaEngine() = default;
|
||||
|
||||
bool CompositeMediaEngine::Init() {
|
||||
|
||||
@ -19,7 +19,6 @@
|
||||
#include "api/audio_codecs/audio_encoder_factory.h"
|
||||
#include "api/crypto/crypto_options.h"
|
||||
#include "api/rtp_parameters.h"
|
||||
#include "api/transport/webrtc_key_value_config.h"
|
||||
#include "api/video/video_bitrate_allocator_factory.h"
|
||||
#include "call/audio_state.h"
|
||||
#include "media/base/codec.h"
|
||||
@ -132,12 +131,8 @@ class MediaEngineInterface {
|
||||
|
||||
// CompositeMediaEngine constructs a MediaEngine from separate
|
||||
// voice and video engine classes.
|
||||
// Optionally owns a WebRtcKeyValueConfig trials map.
|
||||
class CompositeMediaEngine : public MediaEngineInterface {
|
||||
public:
|
||||
CompositeMediaEngine(std::unique_ptr<webrtc::WebRtcKeyValueConfig> trials,
|
||||
std::unique_ptr<VoiceEngineInterface> audio_engine,
|
||||
std::unique_ptr<VideoEngineInterface> video_engine);
|
||||
CompositeMediaEngine(std::unique_ptr<VoiceEngineInterface> audio_engine,
|
||||
std::unique_ptr<VideoEngineInterface> video_engine);
|
||||
~CompositeMediaEngine() override;
|
||||
@ -149,7 +144,6 @@ class CompositeMediaEngine : public MediaEngineInterface {
|
||||
const VideoEngineInterface& video() const override;
|
||||
|
||||
private:
|
||||
const std::unique_ptr<webrtc::WebRtcKeyValueConfig> trials_;
|
||||
std::unique_ptr<VoiceEngineInterface> voice_engine_;
|
||||
std::unique_ptr<VideoEngineInterface> video_engine_;
|
||||
};
|
||||
|
||||
@ -24,7 +24,6 @@
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "api/transport/field_trial_based_config.h"
|
||||
#include "api/video/video_frame.h"
|
||||
#include "call/audio_receive_stream.h"
|
||||
#include "call/audio_send_stream.h"
|
||||
@ -363,10 +362,6 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
|
||||
|
||||
webrtc::Call::Stats GetStats() const override;
|
||||
|
||||
const webrtc::WebRtcKeyValueConfig& trials() const override {
|
||||
return trials_;
|
||||
}
|
||||
|
||||
void SignalChannelNetworkState(webrtc::MediaType media,
|
||||
webrtc::NetworkState state) override;
|
||||
void OnAudioTransportOverheadChanged(
|
||||
@ -390,7 +385,6 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
|
||||
|
||||
int num_created_send_streams_;
|
||||
int num_created_receive_streams_;
|
||||
webrtc::FieldTrialBasedConfig trials_;
|
||||
};
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
@ -15,7 +15,6 @@
|
||||
|
||||
#include "api/task_queue/default_task_queue_factory.h"
|
||||
#include "api/task_queue/task_queue_factory.h"
|
||||
#include "api/transport/field_trial_based_config.h"
|
||||
#include "media/engine/webrtc_voice_engine.h"
|
||||
#include "modules/audio_device/include/mock_audio_device.h"
|
||||
#include "modules/audio_processing/include/audio_processing.h"
|
||||
@ -32,12 +31,11 @@ TEST(NullWebRtcVideoEngineTest, CheckInterface) {
|
||||
webrtc::CreateDefaultTaskQueueFactory();
|
||||
rtc::scoped_refptr<webrtc::test::MockAudioDeviceModule> adm =
|
||||
webrtc::test::MockAudioDeviceModule::CreateNice();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
auto audio_engine = std::make_unique<WebRtcVoiceEngine>(
|
||||
task_queue_factory.get(), adm,
|
||||
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
|
||||
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr,
|
||||
webrtc::AudioProcessingBuilder().Create(), trials);
|
||||
webrtc::AudioProcessingBuilder().Create());
|
||||
|
||||
CompositeMediaEngine engine(std::move(audio_engine),
|
||||
std::make_unique<NullWebRtcVideoEngine>());
|
||||
|
||||
@ -16,7 +16,6 @@
|
||||
#include <algorithm>
|
||||
#include <string>
|
||||
|
||||
#include "absl/strings/match.h"
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video/video_codec_constants.h"
|
||||
#include "media/base/media_constants.h"
|
||||
@ -27,6 +26,7 @@
|
||||
#include "rtc_base/experiments/normalize_simulcast_size_experiment.h"
|
||||
#include "rtc_base/experiments/rate_control_settings.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
@ -103,9 +103,7 @@ constexpr const SimulcastFormat kSimulcastFormats[] = {
|
||||
const int kMaxScreenshareSimulcastLayers = 2;
|
||||
|
||||
// Multiway: Number of temporal layers for each simulcast stream.
|
||||
int DefaultNumberOfTemporalLayers(int simulcast_id,
|
||||
bool screenshare,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
int DefaultNumberOfTemporalLayers(int simulcast_id, bool screenshare) {
|
||||
RTC_CHECK_GE(simulcast_id, 0);
|
||||
RTC_CHECK_LT(simulcast_id, webrtc::kMaxSimulcastStreams);
|
||||
|
||||
@ -116,8 +114,10 @@ int DefaultNumberOfTemporalLayers(int simulcast_id,
|
||||
: kDefaultNumTemporalLayers;
|
||||
|
||||
const std::string group_name =
|
||||
screenshare ? trials.Lookup("WebRTC-VP8ScreenshareTemporalLayers")
|
||||
: trials.Lookup("WebRTC-VP8ConferenceTemporalLayers");
|
||||
screenshare ? webrtc::field_trial::FindFullName(
|
||||
"WebRTC-VP8ScreenshareTemporalLayers")
|
||||
: webrtc::field_trial::FindFullName(
|
||||
"WebRTC-VP8ConferenceTemporalLayers");
|
||||
if (group_name.empty())
|
||||
return default_num_temporal_layers;
|
||||
|
||||
@ -231,10 +231,9 @@ webrtc::DataRate GetTotalMaxBitrate(
|
||||
size_t LimitSimulcastLayerCount(int width,
|
||||
int height,
|
||||
size_t need_layers,
|
||||
size_t layer_count,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial),
|
||||
"Disabled")) {
|
||||
size_t layer_count) {
|
||||
if (!webrtc::field_trial::IsDisabled(
|
||||
kUseLegacySimulcastLayerLimitFieldTrial)) {
|
||||
size_t adaptive_layer_count = std::max(
|
||||
need_layers,
|
||||
kSimulcastFormats[FindSimulcastFormatIndex(width, height)].max_layers);
|
||||
@ -255,28 +254,27 @@ std::vector<webrtc::VideoStream> GetSimulcastConfig(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool is_screenshare_with_conference_mode,
|
||||
bool temporal_layers_supported,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
bool temporal_layers_supported) {
|
||||
RTC_DCHECK_LE(min_layers, max_layers);
|
||||
RTC_DCHECK(max_layers > 1 || is_screenshare_with_conference_mode);
|
||||
|
||||
const bool base_heavy_tl3_rate_alloc =
|
||||
webrtc::RateControlSettings::ParseFromKeyValueConfig(&trials)
|
||||
webrtc::RateControlSettings::ParseFromFieldTrials()
|
||||
.Vp8BaseHeavyTl3RateAllocation();
|
||||
if (is_screenshare_with_conference_mode) {
|
||||
return GetScreenshareLayers(max_layers, width, height, bitrate_priority,
|
||||
max_qp, temporal_layers_supported,
|
||||
base_heavy_tl3_rate_alloc, trials);
|
||||
base_heavy_tl3_rate_alloc);
|
||||
} else {
|
||||
// Some applications rely on the old behavior limiting the simulcast layer
|
||||
// count based on the resolution automatically, which they can get through
|
||||
// the WebRTC-LegacySimulcastLayerLimit field trial until they update.
|
||||
max_layers =
|
||||
LimitSimulcastLayerCount(width, height, min_layers, max_layers, trials);
|
||||
LimitSimulcastLayerCount(width, height, min_layers, max_layers);
|
||||
|
||||
return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority,
|
||||
max_qp, temporal_layers_supported,
|
||||
base_heavy_tl3_rate_alloc, trials);
|
||||
base_heavy_tl3_rate_alloc);
|
||||
}
|
||||
}
|
||||
|
||||
@ -287,8 +285,7 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool temporal_layers_supported,
|
||||
bool base_heavy_tl3_rate_alloc,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
bool base_heavy_tl3_rate_alloc) {
|
||||
std::vector<webrtc::VideoStream> layers(layer_count);
|
||||
|
||||
// Format width and height has to be divisible by |2 ^ num_simulcast_layers -
|
||||
@ -303,13 +300,11 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
|
||||
// TODO(pbos): Fill actual temporal-layer bitrate thresholds.
|
||||
layers[s].max_qp = max_qp;
|
||||
layers[s].num_temporal_layers =
|
||||
temporal_layers_supported
|
||||
? DefaultNumberOfTemporalLayers(s, false, trials)
|
||||
: 1;
|
||||
temporal_layers_supported ? DefaultNumberOfTemporalLayers(s, false) : 1;
|
||||
layers[s].max_bitrate_bps = FindSimulcastMaxBitrate(width, height).bps();
|
||||
layers[s].target_bitrate_bps =
|
||||
FindSimulcastTargetBitrate(width, height).bps();
|
||||
int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false, trials);
|
||||
int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false);
|
||||
if (s == 0) {
|
||||
// If alternative temporal rate allocation is selected, adjust the
|
||||
// bitrate of the lowest simulcast stream so that absolute bitrate for
|
||||
@ -361,8 +356,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool temporal_layers_supported,
|
||||
bool base_heavy_tl3_rate_alloc,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
bool base_heavy_tl3_rate_alloc) {
|
||||
auto max_screenshare_layers = kMaxScreenshareSimulcastLayers;
|
||||
size_t num_simulcast_layers =
|
||||
std::min<int>(max_layers, max_screenshare_layers);
|
||||
@ -385,8 +379,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
// restrictions. The base simulcast layer will still use legacy setup.
|
||||
if (num_simulcast_layers == kMaxScreenshareSimulcastLayers) {
|
||||
// Add optional upper simulcast layer.
|
||||
const int num_temporal_layers =
|
||||
DefaultNumberOfTemporalLayers(1, true, trials);
|
||||
const int num_temporal_layers = DefaultNumberOfTemporalLayers(1, true);
|
||||
int max_bitrate_bps;
|
||||
bool using_boosted_bitrate = false;
|
||||
if (!temporal_layers_supported) {
|
||||
@ -396,7 +389,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
kScreenshareHighStreamMaxBitrate.bps() *
|
||||
webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
|
||||
num_temporal_layers, 0, base_heavy_tl3_rate_alloc));
|
||||
} else if (DefaultNumberOfTemporalLayers(1, true, trials) != 3 ||
|
||||
} else if (DefaultNumberOfTemporalLayers(1, true) != 3 ||
|
||||
base_heavy_tl3_rate_alloc) {
|
||||
// Experimental temporal layer mode used, use increased max bitrate.
|
||||
max_bitrate_bps = kScreenshareHighStreamMaxBitrate.bps();
|
||||
@ -416,9 +409,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
layers[1].max_qp = max_qp;
|
||||
layers[1].max_framerate = kDefaultVideoMaxFramerate;
|
||||
layers[1].num_temporal_layers =
|
||||
temporal_layers_supported
|
||||
? DefaultNumberOfTemporalLayers(1, true, trials)
|
||||
: 1;
|
||||
temporal_layers_supported ? DefaultNumberOfTemporalLayers(1, true) : 1;
|
||||
layers[1].min_bitrate_bps = using_boosted_bitrate
|
||||
? kScreenshareHighStreamMinBitrate.bps()
|
||||
: layers[0].target_bitrate_bps * 2;
|
||||
|
||||
@ -15,7 +15,6 @@
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "api/transport/webrtc_key_value_config.h"
|
||||
#include "api/units/data_rate.h"
|
||||
#include "api/video_codecs/video_encoder_config.h"
|
||||
|
||||
@ -42,8 +41,7 @@ std::vector<webrtc::VideoStream> GetSimulcastConfig(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool is_screenshare_with_conference_mode,
|
||||
bool temporal_layers_supported,
|
||||
const webrtc::WebRtcKeyValueConfig& trials);
|
||||
bool temporal_layers_supported);
|
||||
|
||||
// Gets the simulcast config layers for a non-screensharing case.
|
||||
std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
|
||||
@ -53,8 +51,7 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool temporal_layers_supported,
|
||||
bool base_heavy_tl3_rate_alloc,
|
||||
const webrtc::WebRtcKeyValueConfig& trials);
|
||||
bool base_heavy_tl3_rate_alloc);
|
||||
|
||||
// Gets simulcast config layers for screenshare settings.
|
||||
std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
@ -64,8 +61,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
|
||||
double bitrate_priority,
|
||||
int max_qp,
|
||||
bool temporal_layers_supported,
|
||||
bool base_heavy_tl3_rate_alloc,
|
||||
const webrtc::WebRtcKeyValueConfig& trials);
|
||||
bool base_heavy_tl3_rate_alloc);
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
|
||||
@ -10,7 +10,6 @@
|
||||
|
||||
#include "media/engine/simulcast.h"
|
||||
|
||||
#include "api/transport/field_trial_based_config.h"
|
||||
#include "media/base/media_constants.h"
|
||||
#include "media/engine/constants.h"
|
||||
#include "test/field_trial.h"
|
||||
@ -81,13 +80,12 @@ TEST(SimulcastTest, BandwidthAboveTotalMaxBitrateGivenToHighestStream) {
|
||||
|
||||
TEST(SimulcastTest, GetConfig) {
|
||||
const std::vector<VideoStream> kExpected = GetSimulcastBitrates720p();
|
||||
const FieldTrialBasedConfig trials;
|
||||
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax,
|
||||
!kScreenshare, true, trials);
|
||||
!kScreenshare, true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(320u, streams[0].width);
|
||||
@ -115,7 +113,6 @@ TEST(SimulcastTest, GetConfig) {
|
||||
TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
const std::vector<VideoStream> kExpected = GetSimulcastBitrates720p();
|
||||
|
||||
@ -123,7 +120,7 @@ TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) {
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax,
|
||||
!kScreenshare, true, trials);
|
||||
!kScreenshare, true);
|
||||
|
||||
EXPECT_EQ(kExpected[0].min_bitrate_bps, streams[0].min_bitrate_bps);
|
||||
EXPECT_EQ(static_cast<int>(0.4 * kExpected[0].target_bitrate_bps / 0.6),
|
||||
@ -140,10 +137,9 @@ TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) {
|
||||
TEST(SimulcastTest, GetConfigWithLimitedMaxLayers) {
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 2;
|
||||
FieldTrialBasedConfig trials;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax,
|
||||
!kScreenshare, true, trials);
|
||||
!kScreenshare, true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(640u, streams[0].width);
|
||||
@ -155,12 +151,11 @@ TEST(SimulcastTest, GetConfigWithLimitedMaxLayers) {
|
||||
TEST(SimulcastTest, GetConfigWithLimitedMaxLayersForResolution) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-LegacySimulcastLayerLimit/Enabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 800, 600, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
|
||||
EXPECT_EQ(2u, streams.size());
|
||||
EXPECT_EQ(400u, streams[0].width);
|
||||
@ -172,12 +167,11 @@ TEST(SimulcastTest, GetConfigWithLimitedMaxLayersForResolution) {
|
||||
TEST(SimulcastTest, GetConfigWithLowResolutionScreenshare) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-LegacySimulcastLayerLimit/Enabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 100, 100, kBitratePriority, kQpMax, kScreenshare,
|
||||
true, trials);
|
||||
std::vector<VideoStream> streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 100, 100,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
|
||||
// Simulcast streams number is never decreased for screenshare,
|
||||
// even for very low resolution.
|
||||
@ -187,12 +181,11 @@ TEST(SimulcastTest, GetConfigWithLowResolutionScreenshare) {
|
||||
TEST(SimulcastTest, GetConfigWithNotLimitedMaxLayersForResolution) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-LegacySimulcastLayerLimit/Disabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 800, 600, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(200u, streams[0].width);
|
||||
@ -204,12 +197,11 @@ TEST(SimulcastTest, GetConfigWithNotLimitedMaxLayersForResolution) {
|
||||
}
|
||||
|
||||
TEST(SimulcastTest, GetConfigWithNormalizedResolution) {
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 2;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 640 + 1, 360 + 1, kBitratePriority, kQpMax,
|
||||
!kScreenshare, true, trials);
|
||||
!kScreenshare, true);
|
||||
|
||||
// Must be divisible by |2 ^ (num_layers - 1)|.
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
@ -222,13 +214,12 @@ TEST(SimulcastTest, GetConfigWithNormalizedResolution) {
|
||||
TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy4) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-NormalizeSimulcastResolution/Enabled-2/");
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 2;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 709, 501, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
|
||||
// Must be divisible by |2 ^ 2|.
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
@ -241,13 +232,12 @@ TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy4) {
|
||||
TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy8) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-NormalizeSimulcastResolution/Enabled-3/");
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 2;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 709, 501, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
|
||||
// Must be divisible by |2 ^ 3|.
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
@ -260,56 +250,53 @@ TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy8) {
|
||||
TEST(SimulcastTest, GetConfigForLegacyLayerLimit) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-LegacySimulcastLayerLimit/Enabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
const size_t kMinLayers = 1;
|
||||
const int kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 320, 180, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(1u, streams.size());
|
||||
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360,
|
||||
kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(2u, streams.size());
|
||||
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080,
|
||||
kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(3u, streams.size());
|
||||
}
|
||||
|
||||
TEST(SimulcastTest, GetConfigForLegacyLayerLimitWithRequiredHD) {
|
||||
test::ScopedFieldTrials field_trials(
|
||||
"WebRTC-LegacySimulcastLayerLimit/Enabled/");
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
const size_t kMinLayers = 3; // "HD" layer must be present!
|
||||
const int kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 320, 180, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(3u, streams.size());
|
||||
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360,
|
||||
kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(3u, streams.size());
|
||||
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080,
|
||||
kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
EXPECT_EQ(3u, streams.size());
|
||||
}
|
||||
|
||||
TEST(SimulcastTest, GetConfigForScreenshareSimulcast) {
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1400, 800, kBitratePriority, kQpMax, kScreenshare,
|
||||
true, trials);
|
||||
std::vector<VideoStream> streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1400, 800,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
|
||||
EXPECT_GT(streams.size(), 1u);
|
||||
for (size_t i = 0; i < streams.size(); ++i) {
|
||||
@ -326,12 +313,11 @@ TEST(SimulcastTest, GetConfigForScreenshareSimulcast) {
|
||||
}
|
||||
|
||||
TEST(SimulcastTest, GetConfigForScreenshareSimulcastWithLimitedMaxLayers) {
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 1;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1400, 800, kBitratePriority, kQpMax, kScreenshare,
|
||||
true, trials);
|
||||
std::vector<VideoStream> streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1400, 800,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
}
|
||||
@ -340,23 +326,22 @@ TEST(SimulcastTest, SimulcastScreenshareMaxBitrateAdjustedForResolution) {
|
||||
constexpr int kScreenshareHighStreamMinBitrateBps = 600000;
|
||||
constexpr int kScreenshareHighStreamMaxBitrateBps = 1250000;
|
||||
constexpr int kMaxBitrate960_540 = 1200000;
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
// Normal case, max bitrate not limited by resolution.
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 2;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 1920, 1080, kBitratePriority, kQpMax,
|
||||
kScreenshare, true, trials);
|
||||
std::vector<VideoStream> streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(streams[1].max_bitrate_bps, kScreenshareHighStreamMaxBitrateBps);
|
||||
EXPECT_EQ(streams[1].min_bitrate_bps, kScreenshareHighStreamMinBitrateBps);
|
||||
EXPECT_GE(streams[1].max_bitrate_bps, streams[1].min_bitrate_bps);
|
||||
|
||||
// At 960x540, the max bitrate is limited to 900kbps.
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540,
|
||||
kBitratePriority, kQpMax, kScreenshare,
|
||||
true, trials);
|
||||
streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(streams[1].max_bitrate_bps, kMaxBitrate960_540);
|
||||
EXPECT_EQ(streams[1].min_bitrate_bps, kScreenshareHighStreamMinBitrateBps);
|
||||
@ -364,9 +349,9 @@ TEST(SimulcastTest, SimulcastScreenshareMaxBitrateAdjustedForResolution) {
|
||||
|
||||
// At 480x270, the max bitrate is limited to 450kbps. This is lower than
|
||||
// the min bitrate, so use that as a lower bound.
|
||||
streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270,
|
||||
kBitratePriority, kQpMax, kScreenshare,
|
||||
true, trials);
|
||||
streams =
|
||||
cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270,
|
||||
kBitratePriority, kQpMax, kScreenshare, true);
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(streams[1].max_bitrate_bps, kScreenshareHighStreamMinBitrateBps);
|
||||
EXPECT_EQ(streams[1].min_bitrate_bps, kScreenshareHighStreamMinBitrateBps);
|
||||
@ -374,12 +359,11 @@ TEST(SimulcastTest, SimulcastScreenshareMaxBitrateAdjustedForResolution) {
|
||||
}
|
||||
|
||||
TEST(SimulcastTest, AveragesBitratesForNonStandardResolution) {
|
||||
FieldTrialBasedConfig trials;
|
||||
const size_t kMinLayers = 1;
|
||||
const size_t kMaxLayers = 3;
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, 900, 800, kBitratePriority, kQpMax, !kScreenshare,
|
||||
true, trials);
|
||||
true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(900u, streams[2].width);
|
||||
@ -396,11 +380,10 @@ TEST(SimulcastTest, BitratesForCloseToStandardResolution) {
|
||||
const size_t kWidth = 1280;
|
||||
const size_t kHeight = 716;
|
||||
const std::vector<VideoStream> kExpectedNear = GetSimulcastBitrates720p();
|
||||
FieldTrialBasedConfig trials;
|
||||
|
||||
std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
|
||||
kMinLayers, kMaxLayers, kWidth, kHeight, kBitratePriority, kQpMax,
|
||||
!kScreenshare, true, trials);
|
||||
!kScreenshare, true);
|
||||
|
||||
EXPECT_EQ(kMaxLayers, streams.size());
|
||||
EXPECT_EQ(kWidth, streams[2].width);
|
||||
|
||||
@ -14,8 +14,8 @@
|
||||
#include <utility>
|
||||
|
||||
#include "absl/algorithm/container.h"
|
||||
#include "absl/strings/match.h"
|
||||
#include "media/engine/webrtc_voice_engine.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
|
||||
#ifdef HAVE_WEBRTC_VIDEO
|
||||
#include "media/engine/webrtc_video_engine.h"
|
||||
@ -27,27 +27,20 @@ namespace cricket {
|
||||
|
||||
std::unique_ptr<MediaEngineInterface> CreateMediaEngine(
|
||||
MediaEngineDependencies dependencies) {
|
||||
// TODO(sprang): Make populating |dependencies.trials| mandatory and remove
|
||||
// these fallbacks.
|
||||
std::unique_ptr<webrtc::WebRtcKeyValueConfig> fallback_trials(
|
||||
dependencies.trials ? nullptr : new webrtc::FieldTrialBasedConfig());
|
||||
const webrtc::WebRtcKeyValueConfig& trials =
|
||||
dependencies.trials ? *dependencies.trials : *fallback_trials;
|
||||
auto audio_engine = std::make_unique<WebRtcVoiceEngine>(
|
||||
dependencies.task_queue_factory, std::move(dependencies.adm),
|
||||
std::move(dependencies.audio_encoder_factory),
|
||||
std::move(dependencies.audio_decoder_factory),
|
||||
std::move(dependencies.audio_mixer),
|
||||
std::move(dependencies.audio_processing), trials);
|
||||
std::move(dependencies.audio_processing));
|
||||
#ifdef HAVE_WEBRTC_VIDEO
|
||||
auto video_engine = std::make_unique<WebRtcVideoEngine>(
|
||||
std::move(dependencies.video_encoder_factory),
|
||||
std::move(dependencies.video_decoder_factory), trials);
|
||||
std::move(dependencies.video_decoder_factory));
|
||||
#else
|
||||
auto video_engine = std::make_unique<NullWebRtcVideoEngine>();
|
||||
#endif
|
||||
return std::make_unique<CompositeMediaEngine>(std::move(fallback_trials),
|
||||
std::move(audio_engine),
|
||||
return std::make_unique<CompositeMediaEngine>(std::move(audio_engine),
|
||||
std::move(video_engine));
|
||||
}
|
||||
|
||||
@ -94,8 +87,7 @@ bool ValidateRtpExtensions(
|
||||
std::vector<webrtc::RtpExtension> FilterRtpExtensions(
|
||||
const std::vector<webrtc::RtpExtension>& extensions,
|
||||
bool (*supported)(absl::string_view),
|
||||
bool filter_redundant_extensions,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
bool filter_redundant_extensions) {
|
||||
RTC_DCHECK(ValidateRtpExtensions(extensions));
|
||||
RTC_DCHECK(supported);
|
||||
std::vector<webrtc::RtpExtension> result;
|
||||
@ -129,8 +121,7 @@ std::vector<webrtc::RtpExtension> FilterRtpExtensions(
|
||||
result.erase(it, result.end());
|
||||
|
||||
// Keep just the highest priority extension of any in the following lists.
|
||||
if (absl::StartsWith(trials.Lookup("WebRTC-FilterAbsSendTimeExtension"),
|
||||
"Enabled")) {
|
||||
if (webrtc::field_trial::IsEnabled("WebRTC-FilterAbsSendTimeExtension")) {
|
||||
static const char* const kBweExtensionPriorities[] = {
|
||||
webrtc::RtpExtension::kTransportSequenceNumberUri,
|
||||
webrtc::RtpExtension::kAbsSendTimeUri,
|
||||
|
||||
@ -21,7 +21,6 @@
|
||||
#include "api/rtp_parameters.h"
|
||||
#include "api/task_queue/task_queue_factory.h"
|
||||
#include "api/transport/bitrate_settings.h"
|
||||
#include "api/transport/webrtc_key_value_config.h"
|
||||
#include "api/video_codecs/video_decoder_factory.h"
|
||||
#include "api/video_codecs/video_encoder_factory.h"
|
||||
#include "media/base/codec.h"
|
||||
@ -49,8 +48,6 @@ struct MediaEngineDependencies {
|
||||
|
||||
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory;
|
||||
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory;
|
||||
|
||||
const webrtc::WebRtcKeyValueConfig* trials = nullptr;
|
||||
};
|
||||
|
||||
// CreateMediaEngine may be called on any thread, though the engine is
|
||||
@ -69,8 +66,7 @@ bool ValidateRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions);
|
||||
std::vector<webrtc::RtpExtension> FilterRtpExtensions(
|
||||
const std::vector<webrtc::RtpExtension>& extensions,
|
||||
bool (*supported)(absl::string_view),
|
||||
bool filter_redundant_extensions,
|
||||
const webrtc::WebRtcKeyValueConfig& trials);
|
||||
bool filter_redundant_extensions);
|
||||
|
||||
webrtc::BitrateConstraints GetBitrateConfigForCodec(const Codec& codec);
|
||||
|
||||
|
||||
@ -13,7 +13,6 @@
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "api/transport/field_trial_based_config.h"
|
||||
#include "media/engine/webrtc_media_engine_defaults.h"
|
||||
#include "test/field_trial.h"
|
||||
#include "test/gtest.h"
|
||||
@ -102,17 +101,15 @@ TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OverlappingIds_EndOfSet) {
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_EmptyList) {
|
||||
std::vector<RtpExtension> extensions;
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions1, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions1, true);
|
||||
EXPECT_EQ(0u, filtered.size());
|
||||
}
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_IncludeOnlySupported) {
|
||||
std::vector<RtpExtension> extensions = MakeUniqueExtensions();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions1, false, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions1, false);
|
||||
EXPECT_EQ(2u, filtered.size());
|
||||
EXPECT_EQ("c", filtered[0].uri);
|
||||
EXPECT_EQ("i", filtered[1].uri);
|
||||
@ -120,27 +117,24 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_IncludeOnlySupported) {
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_1) {
|
||||
std::vector<RtpExtension> extensions = MakeUniqueExtensions();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, false, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, false);
|
||||
EXPECT_EQ(12u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
}
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_2) {
|
||||
std::vector<RtpExtension> extensions = MakeUniqueExtensions();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(12u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
}
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_DontRemoveRedundant) {
|
||||
std::vector<RtpExtension> extensions = MakeRedundantExtensions();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, false, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, false);
|
||||
EXPECT_EQ(12u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
EXPECT_EQ(filtered[0].uri, filtered[1].uri);
|
||||
@ -148,9 +142,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_DontRemoveRedundant) {
|
||||
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundant) {
|
||||
std::vector<RtpExtension> extensions = MakeRedundantExtensions();
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(6u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
EXPECT_NE(filtered[0].uri, filtered[1].uri);
|
||||
@ -162,9 +155,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantEncrypted_1) {
|
||||
extensions.push_back(webrtc::RtpExtension("b", 2, true));
|
||||
extensions.push_back(webrtc::RtpExtension("c", 3));
|
||||
extensions.push_back(webrtc::RtpExtension("b", 4));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(3u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
EXPECT_EQ(filtered[0].uri, filtered[1].uri);
|
||||
@ -179,9 +171,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantEncrypted_2) {
|
||||
extensions.push_back(webrtc::RtpExtension("b", 2));
|
||||
extensions.push_back(webrtc::RtpExtension("c", 3));
|
||||
extensions.push_back(webrtc::RtpExtension("b", 4));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(3u, filtered.size());
|
||||
EXPECT_TRUE(IsSorted(filtered));
|
||||
EXPECT_EQ(filtered[0].uri, filtered[1].uri);
|
||||
@ -193,7 +184,6 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantEncrypted_2) {
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_1) {
|
||||
webrtc::test::ScopedFieldTrials override_field_trials_(
|
||||
"WebRTC-FilterAbsSendTimeExtension/Enabled/");
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<RtpExtension> extensions;
|
||||
extensions.push_back(
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3));
|
||||
@ -203,7 +193,7 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_1) {
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(1u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
|
||||
}
|
||||
@ -218,9 +208,8 @@ TEST(WebRtcMediaEngineTest,
|
||||
extensions.push_back(
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(2u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
|
||||
EXPECT_EQ(RtpExtension::kAbsSendTimeUri, filtered[1].uri);
|
||||
@ -229,7 +218,6 @@ TEST(WebRtcMediaEngineTest,
|
||||
TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBweEncrypted_1) {
|
||||
webrtc::test::ScopedFieldTrials override_field_trials_(
|
||||
"WebRTC-FilterAbsSendTimeExtension/Enabled/");
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<RtpExtension> extensions;
|
||||
extensions.push_back(
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3));
|
||||
@ -243,7 +231,7 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBweEncrypted_1) {
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 2, true));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(2u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[1].uri);
|
||||
@ -264,9 +252,8 @@ TEST(WebRtcMediaEngineTest,
|
||||
extensions.push_back(
|
||||
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 2, true));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(3u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
|
||||
EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[1].uri);
|
||||
@ -279,9 +266,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_2) {
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 1));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 14));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 7));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(1u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kAbsSendTimeUri, filtered[0].uri);
|
||||
}
|
||||
@ -290,9 +276,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_3) {
|
||||
std::vector<RtpExtension> extensions;
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 2));
|
||||
extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
std::vector<webrtc::RtpExtension> filtered =
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true, trials);
|
||||
FilterRtpExtensions(extensions, SupportedExtensions2, true);
|
||||
EXPECT_EQ(1u, filtered.size());
|
||||
EXPECT_EQ(RtpExtension::kTimestampOffsetUri, filtered[0].uri);
|
||||
}
|
||||
@ -300,8 +285,6 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_3) {
|
||||
TEST(WebRtcMediaEngineTest, Create) {
|
||||
MediaEngineDependencies deps;
|
||||
webrtc::SetMediaEngineDefaults(&deps);
|
||||
webrtc::FieldTrialBasedConfig trials;
|
||||
deps.trials = &trials;
|
||||
|
||||
std::unique_ptr<MediaEngineInterface> engine =
|
||||
CreateMediaEngine(std::move(deps));
|
||||
|
||||
@ -40,6 +40,7 @@
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "rtc_base/trace_event.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
@ -60,13 +61,23 @@ const char* StreamTypeToString(
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
bool IsEnabled(const webrtc::WebRtcKeyValueConfig& trials,
|
||||
absl::string_view name) {
|
||||
return absl::StartsWith(trials.Lookup(name), "Enabled");
|
||||
// If this field trial is enabled, we will enable sending FlexFEC and disable
|
||||
// sending ULPFEC whenever the former has been negotiated in the SDPs.
|
||||
bool IsFlexfecFieldTrialEnabled() {
|
||||
return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03");
|
||||
}
|
||||
|
||||
void AddDefaultFeedbackParams(VideoCodec* codec,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
// If this field trial is enabled, the "flexfec-03" codec will be advertised
|
||||
// as being supported. This means that "flexfec-03" will appear in the default
|
||||
// SDP offer, and we therefore need to be ready to receive FlexFEC packets from
|
||||
// the remote. It also means that FlexFEC SSRCs will be generated by
|
||||
// MediaSession and added as "a=ssrc:" and "a=ssrc-group:" lines in the local
|
||||
// SDP.
|
||||
bool IsFlexfecAdvertisedFieldTrialEnabled() {
|
||||
return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03-Advertised");
|
||||
}
|
||||
|
||||
void AddDefaultFeedbackParams(VideoCodec* codec) {
|
||||
// Don't add any feedback params for RED and ULPFEC.
|
||||
if (codec->name == kRedCodecName || codec->name == kUlpfecCodecName)
|
||||
return;
|
||||
@ -80,7 +91,7 @@ void AddDefaultFeedbackParams(VideoCodec* codec,
|
||||
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
|
||||
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli));
|
||||
if (codec->name == kVp8CodecName &&
|
||||
IsEnabled(trials, "WebRTC-RtcpLossNotification")) {
|
||||
webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) {
|
||||
codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamLntf, kParamValueEmpty));
|
||||
}
|
||||
}
|
||||
@ -90,8 +101,7 @@ void AddDefaultFeedbackParams(VideoCodec* codec,
|
||||
// codecs for recognized codecs (VP8, VP9, H264, and RED). It will also add
|
||||
// default feedback params to the codecs.
|
||||
std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
|
||||
std::vector<webrtc::SdpVideoFormat> input_formats,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
std::vector<webrtc::SdpVideoFormat> input_formats) {
|
||||
if (input_formats.empty())
|
||||
return std::vector<VideoCodec>();
|
||||
static const int kFirstDynamicPayloadType = 96;
|
||||
@ -101,7 +111,7 @@ std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
|
||||
input_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName));
|
||||
input_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName));
|
||||
|
||||
if (IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised")) {
|
||||
if (IsFlexfecAdvertisedFieldTrialEnabled()) {
|
||||
webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName);
|
||||
// This value is currently arbitrarily set to 10 seconds. (The unit
|
||||
// is microseconds.) This parameter MUST be present in the SDP, but
|
||||
@ -115,7 +125,7 @@ std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
|
||||
for (const webrtc::SdpVideoFormat& format : input_formats) {
|
||||
VideoCodec codec(format);
|
||||
codec.id = payload_type;
|
||||
AddDefaultFeedbackParams(&codec, trials);
|
||||
AddDefaultFeedbackParams(&codec);
|
||||
output_codecs.push_back(codec);
|
||||
|
||||
// Increment payload type.
|
||||
@ -149,8 +159,7 @@ std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
|
||||
template <class T>
|
||||
std::vector<VideoCodec> GetPayloadTypesAndDefaultCodecs(
|
||||
const T* factory,
|
||||
bool is_decoder_factory,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
bool is_decoder_factory) {
|
||||
if (!factory) {
|
||||
return {};
|
||||
}
|
||||
@ -161,8 +170,7 @@ std::vector<VideoCodec> GetPayloadTypesAndDefaultCodecs(
|
||||
AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats);
|
||||
}
|
||||
|
||||
return AssignPayloadTypesAndDefaultCodecs(std::move(supported_formats),
|
||||
trials);
|
||||
return AssignPayloadTypesAndDefaultCodecs(std::move(supported_formats));
|
||||
}
|
||||
|
||||
bool IsTemporalLayersSupported(const std::string& codec_name) {
|
||||
@ -237,9 +245,8 @@ static bool ValidateStreamParams(const StreamParams& sp) {
|
||||
}
|
||||
|
||||
// Returns true if the given codec is disallowed from doing simulcast.
|
||||
bool IsCodecDisabledForSimulcast(const std::string& codec_name,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
return !absl::StartsWith(trials.Lookup("WebRTC-H264Simulcast"), "Disabled")
|
||||
bool IsCodecDisabledForSimulcast(const std::string& codec_name) {
|
||||
return !webrtc::field_trial::IsDisabled("WebRTC-H264Simulcast")
|
||||
? absl::EqualsIgnoreCase(codec_name, kVp9CodecName)
|
||||
: absl::EqualsIgnoreCase(codec_name, kH264CodecName) ||
|
||||
absl::EqualsIgnoreCase(codec_name, kVp9CodecName);
|
||||
@ -265,11 +272,9 @@ static int GetMaxDefaultVideoBitrateKbps(int width,
|
||||
return max_bitrate;
|
||||
}
|
||||
|
||||
bool GetVp9LayersFromFieldTrialGroup(
|
||||
size_t* num_spatial_layers,
|
||||
size_t* num_temporal_layers,
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
|
||||
std::string group = trials.Lookup("WebRTC-SupportVP9SVC");
|
||||
bool GetVp9LayersFromFieldTrialGroup(size_t* num_spatial_layers,
|
||||
size_t* num_temporal_layers) {
|
||||
std::string group = webrtc::field_trial::FindFullName("WebRTC-SupportVP9SVC");
|
||||
if (group.empty())
|
||||
return false;
|
||||
|
||||
@ -288,21 +293,19 @@ bool GetVp9LayersFromFieldTrialGroup(
|
||||
return true;
|
||||
}
|
||||
|
||||
absl::optional<size_t> GetVp9SpatialLayersFromFieldTrial(
|
||||
const webrtc::WebRtcKeyValueConfig& trials) {
absl::optional<size_t> GetVp9SpatialLayersFromFieldTrial() {
size_t num_sl;
size_t num_tl;
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl, trials)) {
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
return num_sl;
}
return absl::nullopt;
}

absl::optional<size_t> GetVp9TemporalLayersFromFieldTrial(
const webrtc::WebRtcKeyValueConfig& trials) {
absl::optional<size_t> GetVp9TemporalLayersFromFieldTrial() {
size_t num_sl;
size_t num_tl;
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl, trials)) {
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
return num_tl;
}
return absl::nullopt;
@@ -464,14 +467,14 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
const size_t default_num_spatial_layers =
parameters_.config.rtp.ssrcs.size();
const size_t num_spatial_layers =
GetVp9SpatialLayersFromFieldTrial(call_->trials())
.value_or(default_num_spatial_layers);
GetVp9SpatialLayersFromFieldTrial().value_or(
default_num_spatial_layers);

const size_t default_num_temporal_layers =
num_spatial_layers > 1 ? kConferenceDefaultNumTemporalLayers : 1;
const size_t num_temporal_layers =
GetVp9TemporalLayersFromFieldTrial(call_->trials())
.value_or(default_num_temporal_layers);
GetVp9TemporalLayersFromFieldTrial().value_or(
default_num_temporal_layers);

vp9_settings.numberOfSpatialLayers = std::min<unsigned char>(
num_spatial_layers, kConferenceMaxNumSpatialLayers);
@@ -493,7 +496,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
{"onkeypic", webrtc::InterLayerPredMode::kOnKeyPic}});
webrtc::ParseFieldTrial(
{&interlayer_pred_experiment_enabled, &inter_layer_pred_mode},
call_->trials().Lookup("WebRTC-Vp9InterLayerPred"));
webrtc::field_trial::FindFullName("WebRTC-Vp9InterLayerPred"));
if (interlayer_pred_experiment_enabled) {
vp9_settings.interLayerPred = inter_layer_pred_mode;
} else {
@@ -564,11 +567,9 @@ void DefaultUnsignalledSsrcHandler::SetDefaultSink(

WebRtcVideoEngine::WebRtcVideoEngine(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory,
const webrtc::WebRtcKeyValueConfig& trials)
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory)
: decoder_factory_(std::move(video_decoder_factory)),
encoder_factory_(std::move(video_encoder_factory)),
trials_(trials) {
encoder_factory_(std::move(video_encoder_factory)) {
RTC_LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()";
}

@@ -589,12 +590,12 @@ VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel(
}
std::vector<VideoCodec> WebRtcVideoEngine::send_codecs() const {
return GetPayloadTypesAndDefaultCodecs(encoder_factory_.get(),
/*is_decoder_factory=*/false, trials_);
/*is_decoder_factory=*/false);
}

std::vector<VideoCodec> WebRtcVideoEngine::recv_codecs() const {
return GetPayloadTypesAndDefaultCodecs(decoder_factory_.get(),
/*is_decoder_factory=*/true, trials_);
/*is_decoder_factory=*/true);
}

std::vector<webrtc::RtpHeaderExtensionCapability>
@@ -613,8 +614,9 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const {
webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) {
result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv);
}
result.emplace_back(webrtc::RtpExtension::kGenericFrameDescriptorUri00, id,
IsEnabled(trials_, "WebRTC-GenericDescriptorAdvertised")
result.emplace_back(
webrtc::RtpExtension::kGenericFrameDescriptorUri00, id,
webrtc::field_trial::IsEnabled("WebRTC-GenericDescriptorAdvertised")
? webrtc::RtpTransceiverDirection::kSendRecv
: webrtc::RtpTransceiverDirection::kStopped);
return result;
@@ -638,12 +640,11 @@ WebRtcVideoChannel::WebRtcVideoChannel(
bitrate_allocator_factory_(bitrate_allocator_factory),
default_send_options_(options),
last_stats_log_ms_(-1),
discard_unknown_ssrc_packets_(
IsEnabled(call_->trials(),
discard_unknown_ssrc_packets_(webrtc::field_trial::IsEnabled(
"WebRTC-Video-DiscardPacketsWithUnknownSsrc")),
crypto_options_(crypto_options),
unknown_ssrc_packet_buffer_(
IsEnabled(call_->trials(),
webrtc::field_trial::IsEnabled(
"WebRTC-Video-BufferPacketsWithUnknownSsrc")
? new UnhandledPacketsBuffer()
: nullptr) {
@@ -652,7 +653,7 @@ WebRtcVideoChannel::WebRtcVideoChannel(
rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
sending_ = false;
recv_codecs_ = MapCodecs(GetPayloadTypesAndDefaultCodecs(
decoder_factory_, /*is_decoder_factory=*/true, call_->trials()));
decoder_factory_, /*is_decoder_factory=*/true));
recv_flexfec_payload_type_ =
recv_codecs_.empty() ? 0 : recv_codecs_.front().flexfec_payload_type;
}
@@ -746,7 +747,7 @@ bool WebRtcVideoChannel::GetChangedSendParameters(
}

// Never enable sending FlexFEC, unless we are in the experiment.
if (!IsEnabled(call_->trials(), "WebRTC-FlexFEC-03")) {
if (!IsFlexfecFieldTrialEnabled()) {
RTC_LOG(LS_INFO) << "WebRTC-FlexFEC-03 field trial is not enabled.";
for (VideoCodecSettings& codec : negotiated_codecs)
codec.flexfec_payload_type = -1;
@@ -766,8 +767,7 @@ bool WebRtcVideoChannel::GetChangedSendParameters(
changed_params->extmap_allow_mixed = params.extmap_allow_mixed;
}
std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true,
call_->trials());
params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true);
if (!send_rtp_extensions_ || (*send_rtp_extensions_ != filtered_extensions)) {
changed_params->rtp_header_extensions =
absl::optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
@@ -1119,8 +1119,7 @@ bool WebRtcVideoChannel::GetChangedRecvParameters(
if (params.is_stream_active) {
const std::vector<VideoCodec> local_supported_codecs =
GetPayloadTypesAndDefaultCodecs(decoder_factory_,
/*is_decoder_factory=*/true,
call_->trials());
/*is_decoder_factory=*/true);
for (const VideoCodecSettings& mapped_codec : mapped_codecs) {
if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) {
RTC_LOG(LS_ERROR)
@@ -1138,8 +1137,7 @@ bool WebRtcVideoChannel::GetChangedRecvParameters(

// Handle RTP header extensions.
std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false,
call_->trials());
params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false);
if (filtered_extensions != recv_rtp_extensions_) {
changed_params->rtp_header_extensions =
absl::optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
@@ -1468,7 +1466,7 @@ void WebRtcVideoChannel::ConfigureReceiverRtp(

// TODO(brandtr): Generalize when we add support for multistream protection.
flexfec_config->payload_type = recv_flexfec_payload_type_;
if (IsEnabled(call_->trials(), "WebRTC-FlexFEC-03-Advertised") &&
if (IsFlexfecAdvertisedFieldTrialEnabled() &&
sp.GetFecFrSsrc(ssrc, &flexfec_config->remote_ssrc)) {
flexfec_config->protected_media_ssrcs = {ssrc};
flexfec_config->local_ssrc = config->rtp.local_ssrc;
@@ -1701,7 +1699,7 @@ void WebRtcVideoChannel::BackfillBufferedPackets(
int delivery_packet_error_cnt = 0;
webrtc::PacketReceiver* receiver = this->call_->Receiver();
unknown_ssrc_packet_buffer_->BackfillPackets(
ssrcs, [&](uint32_t /*ssrc*/, int64_t packet_time_us,
ssrcs, [&](uint32_t ssrc, int64_t packet_time_us,
rtc::CopyOnWriteBuffer packet) {
switch (receiver->DeliverPacket(webrtc::MediaType::VIDEO, packet,
packet_time_us)) {
@@ -1762,7 +1760,7 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
// which case that is used as UDP recevie buffer size. All other values shall
// result in the default value being used.
const std::string group_name_recv_buf_size =
call_->trials().Lookup("WebRTC-IncreasedReceivebuffers");
webrtc::field_trial::FindFullName("WebRTC-IncreasedReceivebuffers");
int recv_buffer_size = kVideoRtpRecvBufferSize;
if (!group_name_recv_buf_size.empty() &&
(sscanf(group_name_recv_buf_size.c_str(), "%d", &recv_buffer_size) != 1 ||
@@ -1780,7 +1778,7 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
// due to lack of socket buffer space, although it's not yet clear what the
// ideal value should be.
const std::string group_name_send_buf_size =
call_->trials().Lookup("WebRTC-SendBufferSizeBytes");
webrtc::field_trial::FindFullName("WebRTC-SendBufferSizeBytes");
int send_buffer_size = kVideoRtpSendBufferSize;
if (!group_name_send_buf_size.empty() &&
(sscanf(group_name_send_buf_size.c_str(), "%d", &send_buffer_size) != 1 ||
@@ -1963,8 +1961,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream(
parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
rtp_parameters_(CreateRtpParametersWithEncodings(sp)),
sending_(false),
disable_automatic_resize_(
IsEnabled(call->trials(), "WebRTC-Video-DisableAutomaticResize")) {
disable_automatic_resize_(webrtc::field_trial::IsEnabled(
"WebRTC-Video-DisableAutomaticResize")) {
// Maximum packet size may come in RtpConfig from external transport, for
// example from QuicTransportInterface implementation, so do not exceed
// given max_packet_size.
@@ -1985,7 +1983,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream(
// FlexFEC SSRCs.
// TODO(brandtr): This code needs to be generalized when we add support for
// multistream protection.
if (IsEnabled(call_->trials(), "WebRTC-FlexFEC-03")) {
if (IsFlexfecFieldTrialEnabled()) {
uint32_t flexfec_ssrc;
bool flexfec_enabled = false;
for (uint32_t primary_ssrc : parameters_.config.rtp.ssrcs) {
@@ -2090,7 +2088,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetDegradationPreference() const {
webrtc::VideoTrackInterface::ContentHint::kText) {
degradation_preference =
webrtc::DegradationPreference::MAINTAIN_RESOLUTION;
} else if (IsEnabled(call_->trials(), "WebRTC-Video-BalancedDegradation")) {
} else if (webrtc::field_trial::IsEnabled(
"WebRTC-Video-BalancedDegradation")) {
// Standard wants balanced by default, but it needs to be tuned first.
degradation_preference = webrtc::DegradationPreference::BALANCED;
} else {
@@ -2341,7 +2340,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
// or a screencast (and not in simulcast screenshare experiment), only
// configure a single stream.
encoder_config.number_of_streams = parameters_.config.rtp.ssrcs.size();
if (IsCodecDisabledForSimulcast(codec.name, call_->trials())) {
if (IsCodecDisabledForSimulcast(codec.name)) {
encoder_config.number_of_streams = 1;
}

@@ -2936,7 +2935,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() {
MaybeAssociateFlexfecWithVideo();
stream_->Start();

if (IsEnabled(call_->trials(), "WebRTC-Video-BufferPacketsWithUnknownSsrc")) {
if (webrtc::field_trial::IsEnabled(
"WebRTC-Video-BufferPacketsWithUnknownSsrc")) {
channel_->BackfillBufferedPackets(stream_params_.ssrcs);
}
}
@@ -3387,18 +3387,15 @@ void WebRtcVideoChannel::SetDepacketizerToDecoderFrameTransformer(
// TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of
// EncoderStreamFactory and instead set this value individually for each stream
// in the VideoEncoderConfig.simulcast_layers.
EncoderStreamFactory::EncoderStreamFactory(
std::string codec_name,
EncoderStreamFactory::EncoderStreamFactory(std::string codec_name,
int max_qp,
bool is_screenshare,
bool conference_mode,
const webrtc::WebRtcKeyValueConfig* trials)
bool conference_mode)

: codec_name_(codec_name),
max_qp_(max_qp),
is_screenshare_(is_screenshare),
conference_mode_(conference_mode),
trials_(trials ? *trials : fallback_trials_) {}
conference_mode_(conference_mode) {}

std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
int width,
@@ -3520,7 +3517,7 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams(
encoder_config.number_of_streams, width, height,
encoder_config.bitrate_priority, max_qp_,
is_screenshare_ && conference_mode_,
temporal_layers_supported, trials_);
temporal_layers_supported);
// Allow an experiment to override the minimum bitrate for the lowest
// spatial layer. The experiment's configuration has the lowest priority.
if (experimental_min_bitrate) {

@@ -19,7 +19,6 @@

#include "absl/types/optional.h"
#include "api/call/transport.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video/video_bitrate_allocator_factory.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
@@ -99,8 +98,7 @@ class WebRtcVideoEngine : public VideoEngineInterface {
// and external hardware codecs.
WebRtcVideoEngine(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory,
const webrtc::WebRtcKeyValueConfig& trials);
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory);

~WebRtcVideoEngine() override;

@@ -122,7 +120,6 @@ class WebRtcVideoEngine : public VideoEngineInterface {
const std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory_;
const std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
bitrate_allocator_factory_;
const webrtc::WebRtcKeyValueConfig& trials_;
};

class WebRtcVideoChannel : public VideoMediaChannel,
@@ -569,7 +566,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
void FillSendAndReceiveCodecStats(VideoMediaInfo* video_media_info)
RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_);

rtc::Thread* const worker_thread_;
rtc::Thread* worker_thread_;
rtc::ThreadChecker thread_checker_;

uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_);
@@ -654,18 +651,7 @@ class EncoderStreamFactory
EncoderStreamFactory(std::string codec_name,
int max_qp,
bool is_screenshare,
bool conference_mode)
: EncoderStreamFactory(codec_name,
max_qp,
is_screenshare,
conference_mode,
nullptr) {}

EncoderStreamFactory(std::string codec_name,
int max_qp,
bool is_screenshare,
bool conference_mode,
const webrtc::WebRtcKeyValueConfig* trials);
bool conference_mode);

private:
std::vector<webrtc::VideoStream> CreateEncoderStreams(
@@ -692,8 +678,6 @@ class EncoderStreamFactory
// Allows a screenshare specific configuration, which enables temporal
// layering and various settings.
const bool conference_mode_;
const webrtc::FieldTrialBasedConfig fallback_trials_;
const webrtc::WebRtcKeyValueConfig& trials_;
};

} // namespace cricket

@@ -257,8 +257,7 @@ class WebRtcVideoEngineTest : public ::testing::Test {
engine_(std::unique_ptr<cricket::FakeWebRtcVideoEncoderFactory>(
encoder_factory_),
std::unique_ptr<cricket::FakeWebRtcVideoDecoderFactory>(
decoder_factory_),
field_trials_) {
decoder_factory_)) {
// Ensure fake clock doesn't return 0, which will cause some initializations
// fail inside RTP senders.
fake_clock_.AdvanceTime(webrtc::TimeDelta::Micros(1));
@@ -1044,9 +1043,8 @@ TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) {
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullFactories) {
std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory;
std::unique_ptr<webrtc::VideoDecoderFactory> decoder_factory;
webrtc::FieldTrialBasedConfig trials;
WebRtcVideoEngine engine(std::move(encoder_factory),
std::move(decoder_factory), trials);
std::move(decoder_factory));
EXPECT_EQ(0u, engine.send_codecs().size());
EXPECT_EQ(0u, engine.recv_codecs().size());
}
@@ -1057,10 +1055,9 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) {
new webrtc::MockVideoEncoderFactory();
webrtc::MockVideoDecoderFactory* decoder_factory =
new webrtc::MockVideoDecoderFactory();
webrtc::FieldTrialBasedConfig trials;
WebRtcVideoEngine engine(
(std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)), trials);
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
// TODO(kron): Change to Times(1) once send and receive codecs are changed
// to be treated independently.
EXPECT_CALL(*encoder_factory, GetSupportedFormats()).Times(1);
@@ -1088,10 +1085,9 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
webrtc::kVideoCodecVP8)))
.WillOnce(
[] { return std::make_unique<webrtc::MockVideoBitrateAllocator>(); });
webrtc::FieldTrialBasedConfig trials;
WebRtcVideoEngine engine(
(std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)), trials);
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
const webrtc::SdpVideoFormat vp8_format("VP8");
const std::vector<webrtc::SdpVideoFormat> supported_formats = {vp8_format};
EXPECT_CALL(*encoder_factory, GetSupportedFormats())
@@ -1208,10 +1204,9 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullDecoder) {
std::unique_ptr<webrtc::MockVideoBitrateAllocatorFactory>
rate_allocator_factory =
std::make_unique<webrtc::MockVideoBitrateAllocatorFactory>();
webrtc::FieldTrialBasedConfig trials;
WebRtcVideoEngine engine(
(std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)), trials);
(std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
const webrtc::SdpVideoFormat vp8_format("VP8");
const std::vector<webrtc::SdpVideoFormat> supported_formats = {vp8_format};
EXPECT_CALL(*encoder_factory, GetSupportedFormats())
@@ -1329,8 +1324,7 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test {
webrtc::CreateBuiltinVideoEncoderFactory(),
std::make_unique<webrtc::test::FunctionVideoDecoderFactory>(
[]() { return std::make_unique<webrtc::test::FakeDecoder>(); },
kSdpVideoFormats),
field_trials_),
kSdpVideoFormats)),
channel_(absl::WrapUnique(static_cast<cricket::WebRtcVideoChannel*>(
engine_.CreateMediaChannel(
call_.get(),
@@ -1462,8 +1456,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
video_bitrate_allocator_factory_(
webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
engine_(webrtc::CreateBuiltinVideoEncoderFactory(),
webrtc::CreateBuiltinVideoDecoderFactory(),
field_trials_) {}
webrtc::CreateBuiltinVideoDecoderFactory()) {}

virtual void SetUp() {
// One testcase calls SetUp in a loop, only create call_ once.
@@ -8162,8 +8155,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test {
engine_(std::unique_ptr<cricket::FakeWebRtcVideoEncoderFactory>(
encoder_factory_),
std::unique_ptr<cricket::FakeWebRtcVideoDecoderFactory>(
decoder_factory_),
field_trials_),
decoder_factory_)),
last_ssrc_(0) {}

void SetUp() override {
@@ -8223,7 +8215,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test {
expected_streams = GetSimulcastConfig(
/*min_layers=*/1, num_configured_streams, capture_width,
capture_height, webrtc::kDefaultBitratePriority, kDefaultQpMax,
screenshare && conference_mode, true, field_trials_);
screenshare && conference_mode, true);
if (screenshare && conference_mode) {
for (const webrtc::VideoStream& stream : expected_streams) {
// Never scale screen content.
@@ -8324,7 +8316,6 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test {
cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_;
std::unique_ptr<webrtc::MockVideoBitrateAllocatorFactory>
mock_rate_allocator_factory_;
webrtc::FieldTrialBasedConfig field_trials_;
WebRtcVideoEngine engine_;
std::unique_ptr<VideoMediaChannel> channel_;
uint32_t last_ssrc_;

@@ -20,7 +20,6 @@
#include "absl/strings/match.h"
#include "api/audio_codecs/audio_codec_pair_id.h"
#include "api/call/audio_sink.h"
#include "api/transport/webrtc_key_value_config.h"
#include "media/base/audio_source.h"
#include "media/base/media_constants.h"
#include "media/base/stream_params.h"
@@ -46,6 +45,7 @@
#include "rtc_base/strings/string_format.h"
#include "rtc_base/third_party/base64/base64.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"

#if WEBRTC_ENABLE_PROTOBUF
@@ -111,6 +111,12 @@ std::string ToString(const AudioCodec& codec) {
return ss.Release();
}

// If this field trial is enabled, we will negotiate and use RFC 2198
// redundancy for opus audio.
bool IsAudioRedForOpusFieldTrialEnabled() {
return webrtc::field_trial::IsEnabled("WebRTC-Audio-Red-For-Opus");
}

bool IsCodec(const AudioCodec& codec, const char* ref_name) {
return absl::EqualsIgnoreCase(codec.name, ref_name);
}
@@ -197,11 +203,6 @@ absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
}
}

bool IsEnabled(const webrtc::WebRtcKeyValueConfig& config,
absl::string_view trial) {
return absl::StartsWith(config.Lookup(trial), "Enabled");
}

struct AdaptivePtimeConfig {
bool enabled = false;
webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16);
@@ -218,8 +219,9 @@ struct AdaptivePtimeConfig {
"use_slow_adaptation", &use_slow_adaptation);
}

explicit AdaptivePtimeConfig(const webrtc::WebRtcKeyValueConfig& trials) {
Parser()->Parse(trials.Lookup("WebRTC-Audio-AdaptivePtime"));
AdaptivePtimeConfig() {
Parser()->Parse(
webrtc::field_trial::FindFullName("WebRTC-Audio-AdaptivePtime"));
#if WEBRTC_ENABLE_PROTOBUF
webrtc::audio_network_adaptor::config::ControllerManager config;
auto* frame_length_controller =
@@ -241,18 +243,13 @@ WebRtcVoiceEngine::WebRtcVoiceEngine(
const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
const webrtc::WebRtcKeyValueConfig& trials)
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing)
: task_queue_factory_(task_queue_factory),
adm_(adm),
encoder_factory_(encoder_factory),
decoder_factory_(decoder_factory),
audio_mixer_(audio_mixer),
apm_(audio_processing),
audio_red_for_opus_trial_enabled_(
IsEnabled(trials, "WebRTC-Audio-Red-For-Opus")),
minimized_remsampling_on_mobile_trial_enabled_(
IsEnabled(trials, "WebRTC-Audio-MinimizeResamplingOnMobile")) {
apm_(audio_processing) {
// This may be called from any thread, so detach thread checkers.
worker_thread_checker_.Detach();
signal_thread_checker_.Detach();
@@ -418,7 +415,8 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
// performed inside the audio processing module on mobile platforms by
// whenever possible turning off the fixed AGC mode and the high-pass filter.
// (https://bugs.chromium.org/p/webrtc/issues/detail?id=6181).
if (minimized_remsampling_on_mobile_trial_enabled_) {
if (webrtc::field_trial::IsEnabled(
"WebRTC-Audio-MinimizeResamplingOnMobile")) {
options.auto_gain_control = false;
RTC_LOG(LS_INFO) << "Disable AGC according to field trial.";
if (!(options.noise_suppression.value_or(false) ||
@@ -724,7 +722,8 @@ std::vector<AudioCodec> WebRtcVoiceEngine::CollectCodecs(

out.push_back(codec);

if (codec.name == kOpusCodecName && audio_red_for_opus_trial_enabled_) {
if (codec.name == kOpusCodecName &&
IsAudioRedForOpusFieldTrialEnabled()) {
map_format({kRedCodecName, 48000, 2}, &out);
}
}
@@ -768,8 +767,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
const absl::optional<webrtc::AudioCodecPairId> codec_pair_id,
rtc::scoped_refptr<webrtc::FrameEncryptorInterface> frame_encryptor,
const webrtc::CryptoOptions& crypto_options)
: adaptive_ptime_config_(call->trials()),
call_(call),
: call_(call),
config_(send_transport),
max_send_bitrate_bps_(max_send_bitrate_bps),
rtp_parameters_(CreateRtpParametersWithOneEncoding()) {
@@ -1370,9 +1368,7 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(
engine_(engine),
call_(call),
audio_config_(config.audio),
crypto_options_(crypto_options),
audio_red_for_opus_trial_enabled_(
IsEnabled(call->trials(), "WebRTC-Audio-Red-For-Opus")) {
crypto_options_(crypto_options) {
RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel";
RTC_DCHECK(call);
engine->RegisterChannel(this);
@@ -1419,8 +1415,7 @@ bool WebRtcVoiceMediaChannel::SetSendParameters(
}

std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
params.extensions, webrtc::RtpExtension::IsSupportedForAudio, true,
call_->trials());
params.extensions, webrtc::RtpExtension::IsSupportedForAudio, true);
if (send_rtp_extensions_ != filtered_extensions) {
send_rtp_extensions_.swap(filtered_extensions);
for (auto& it : send_streams_) {
@@ -1457,8 +1452,7 @@ bool WebRtcVoiceMediaChannel::SetRecvParameters(
return false;
}
std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
params.extensions, webrtc::RtpExtension::IsSupportedForAudio, false,
call_->trials());
params.extensions, webrtc::RtpExtension::IsSupportedForAudio, false);
if (recv_rtp_extensions_ != filtered_extensions) {
recv_rtp_extensions_.swap(filtered_extensions);
for (auto& it : recv_streams_) {
@@ -1634,7 +1628,7 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs(
}
auto format = AudioCodecToSdpAudioFormat(codec);
if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) &&
(!audio_red_for_opus_trial_enabled_ ||
(!IsAudioRedForOpusFieldTrialEnabled() ||
!IsCodec(codec, kRedCodecName)) &&
!engine()->decoder_factory_->IsSupportedDecoder(format)) {
RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format);
@@ -1788,7 +1782,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
}
}

if (audio_red_for_opus_trial_enabled_) {
if (IsAudioRedForOpusFieldTrialEnabled()) {
// Loop through the codecs to find the RED codec that matches opus
// with respect to clockrate and number of channels.
size_t red_codec_position = 0;

@@ -20,7 +20,6 @@
#include "api/scoped_refptr.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/rtp/rtp_source.h"
#include "api/transport/webrtc_key_value_config.h"
#include "call/audio_state.h"
#include "call/call.h"
#include "media/base/media_engine.h"
@@ -50,8 +49,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
const webrtc::WebRtcKeyValueConfig& trials);
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing);
~WebRtcVoiceEngine() override;

// Does initialization that needs to occur on the worker thread.
@@ -129,11 +127,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
int audio_jitter_buffer_min_delay_ms_ = 0;
bool audio_jitter_buffer_enable_rtx_handling_ = false;

// If this field trial is enabled, we will negotiate and use RFC 2198
// redundancy for opus audio.
const bool audio_red_for_opus_trial_enabled_;
const bool minimized_remsampling_on_mobile_trial_enabled_;

RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceEngine);
};

@@ -338,8 +331,6 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
unsignaled_frame_decryptor_;

const bool audio_red_for_opus_trial_enabled_;

RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceMediaChannel);
};
} // namespace cricket

@@ -158,12 +158,10 @@ TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) {
EXPECT_CALL(*apm, DetachAecDump());
}
{
webrtc::FieldTrialBasedConfig trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
trials);
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
engine.Init();
}
}
@@ -212,7 +210,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
auto decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
engine_.reset(new cricket::WebRtcVoiceEngine(
task_queue_factory_.get(), adm_, encoder_factory, decoder_factory,
nullptr, apm_, trials_config_));
nullptr, apm_));
engine_->Init();
send_parameters_.codecs.push_back(kPcmuCodec);
recv_parameters_.codecs.push_back(kPcmuCodec);
@@ -803,7 +801,6 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {

private:
webrtc::test::ScopedFieldTrials override_field_trials_;
webrtc::FieldTrialBasedConfig trials_config_;
};

INSTANTIATE_TEST_SUITE_P(TestBothWithAndWithoutNullApm,
@@ -3631,15 +3628,14 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) {
webrtc::test::MockAudioDeviceModule::CreateNice();
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
field_trials);
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
webrtc::FieldTrialBasedConfig field_trials;
call_config.trials = &field_trials;
call_config.task_queue_factory = task_queue_factory.get();
auto call = absl::WrapUnique(webrtc::Call::Create(call_config));
@@ -3663,15 +3659,14 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) {
{
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
field_trials);
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
webrtc::FieldTrialBasedConfig field_trials;
call_config.trials = &field_trials;
call_config.task_queue_factory = task_queue_factory.get();
auto call = absl::WrapUnique(webrtc::Call::Create(call_config));
@@ -3697,12 +3692,10 @@ TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) {
webrtc::test::MockAudioDeviceModule::CreateNice();
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
field_trials);
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
engine.Init();
for (const cricket::AudioCodec& codec : engine.send_codecs()) {
auto is_codec = [&codec](const char* name, int clockrate = 0) {
@@ -3750,15 +3743,14 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) {
webrtc::test::MockAudioDeviceModule::CreateNice();
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
field_trials);
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
webrtc::FieldTrialBasedConfig field_trials;
call_config.trials = &field_trials;
call_config.task_queue_factory = task_queue_factory.get();
auto call = absl::WrapUnique(webrtc::Call::Create(call_config));
@@ -3799,14 +3791,14 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) {
webrtc::test::MockAudioDeviceModule::CreateNice();
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm,
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm, field_trials);
webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
webrtc::FieldTrialBasedConfig field_trials;
call_config.trials = &field_trials;
call_config.task_queue_factory = task_queue_factory.get();
auto call = absl::WrapUnique(webrtc::Call::Create(call_config));
@@ -3851,10 +3843,9 @@ TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) {

rtc::scoped_refptr<webrtc::AudioProcessing> apm =
use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create();
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm, unused_encoder_factory,
mock_decoder_factory, nullptr, apm, field_trials);
cricket::WebRtcVoiceEngine engine(task_queue_factory.get(), adm,
unused_encoder_factory,
mock_decoder_factory, nullptr, apm);
engine.Init();
auto codecs = engine.recv_codecs();
EXPECT_EQ(11u, codecs.size());

@@ -759,7 +759,6 @@ if (rtc_include_tests) {
"../../api:array_view",
"../../api:videocodec_test_fixture_api",
"../../api/test/video:function_video_factory",
"../../api/transport:field_trial_based_config",
"../../api/video:video_bitrate_allocation",
"../../api/video_codecs:video_codecs_api",
"../../call:video_stream_api",

@@ -22,7 +22,6 @@

#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_codec.h"
@@ -62,11 +61,10 @@ const int kMaxFramerateFps = 30;
const int kMaxQp = 56;

void ConfigureSimulcast(VideoCodec* codec_settings) {
FieldTrialBasedConfig trials;
const std::vector<webrtc::VideoStream> streams = cricket::GetSimulcastConfig(
/*min_layer=*/1, codec_settings->numberOfSimulcastStreams,
codec_settings->width, codec_settings->height, kBitratePriority, kMaxQp,
/* is_screenshare = */ false, true, trials);
/* is_screenshare = */ false, true);

for (size_t i = 0; i < streams.size(); ++i) {
SpatialLayer* ss = &codec_settings->simulcastStream[i];

@@ -584,7 +584,6 @@ if (rtc_include_tests) {
"../api/rtc_event_log",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
"../api/transport:field_trial_based_config",
"../api/transport/rtp:rtp_source",
"../api/units:time_delta",
"../api/video:builtin_video_bitrate_allocator_factory",

@@ -58,6 +58,7 @@
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"

using cricket::ContentInfo;
@@ -4343,9 +4344,7 @@ bool PeerConnection::StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
bool PeerConnection::StartRtcEventLog(
std::unique_ptr<RtcEventLogOutput> output) {
int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput;
if (absl::StartsWith(
call_ptr_->trials().Lookup("WebRTC-RtcEventLogNewFormat"),
"Enabled")) {
if (field_trial::IsEnabled("WebRTC-RtcEventLogNewFormat")) {
output_period_ms = 5000;
}
return StartRtcEventLog(std::move(output), output_period_ms);
@@ -5788,7 +5787,8 @@ PeerConnection::InitializePortAllocator_n(
// by experiment.
if (configuration.disable_ipv6) {
port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
} else if (absl::StartsWith(call_ptr_->trials().Lookup("WebRTC-IPv6Default"),
} else if (absl::StartsWith(
webrtc::field_trial::FindFullName("WebRTC-IPv6Default"),
"Disabled")) {
port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
}

@@ -87,8 +87,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {

const Options& options() const { return options_; }

const WebRtcKeyValueConfig& trials() const { return *trials_.get(); }

protected:
// This structure allows simple management of all new dependencies being added
// to the PeerConnectionFactory.

@@ -28,7 +28,6 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/rtp_receiver_interface.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "api/uma_metrics.h"
#include "api/video_codecs/sdp_video_format.h"
#include "call/call.h"
@@ -634,7 +633,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
pc_factory_dependencies.signaling_thread = signaling_thread;
pc_factory_dependencies.task_queue_factory =
webrtc::CreateDefaultTaskQueueFactory();
pc_factory_dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory =
pc_factory_dependencies.task_queue_factory.get();
@@ -654,8 +652,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
media_deps.audio_processing = AudioProcessingBuilderForTesting().Create();
}

media_deps.trials = pc_factory_dependencies.trials.get();

pc_factory_dependencies.media_engine =
cricket::CreateMediaEngine(std::move(media_deps));
pc_factory_dependencies.call_factory = webrtc::CreateCallFactory();

@@ -43,7 +43,6 @@
#include "api/rtp_transceiver_interface.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
@@ -647,14 +646,12 @@ class PeerConnectionFactoryForTest : public webrtc::PeerConnectionFactory {
dependencies.network_thread = rtc::Thread::Current();
dependencies.signaling_thread = rtc::Thread::Current();
dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory = dependencies.task_queue_factory.get();
// Use fake audio device module since we're only testing the interface
// level, and using a real one could make tests flaky when run in parallel.
media_deps.adm = FakeAudioCaptureModule::Create();
SetMediaEngineDefaults(&media_deps);
media_deps.trials = dependencies.trials.get();
dependencies.media_engine =
cricket::CreateMediaEngine(std::move(media_deps));
dependencies.call_factory = webrtc::CreateCallFactory();

@@ -11,7 +11,6 @@
#include <memory>

#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "media/engine/webrtc_media_engine.h"
#include "media/engine/webrtc_media_engine_defaults.h"
#include "pc/media_session.h"
@@ -48,11 +47,9 @@ PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() {
dependencies.network_thread = rtc::Thread::Current();
dependencies.signaling_thread = rtc::Thread::Current();
dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory = dependencies.task_queue_factory.get();
media_deps.adm = FakeAudioCaptureModule::Create();
media_deps.trials = dependencies.trials.get();
SetMediaEngineDefaults(&media_deps);
dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
dependencies.call_factory = CreateCallFactory();

@@ -973,7 +973,6 @@ if (is_ios || is_mac) {
"../api/crypto:frame_encryptor_interface",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
"../api/transport:field_trial_based_config",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../api/video_codecs:video_codecs_api",

@@ -37,7 +37,6 @@
#include "api/audio_codecs/builtin_audio_encoder_factory.h" // nogncheck
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "modules/audio_device/include/audio_device.h" // nogncheck
#include "modules/audio_processing/include/audio_processing.h" // nogncheck

@@ -191,7 +190,6 @@
}
#ifndef HAVE_NO_MEDIA
dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
dependencies.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
cricket::MediaEngineDependencies media_deps;
media_deps.adm = std::move(audioDeviceModule);
media_deps.task_queue_factory = dependencies.task_queue_factory.get();
@@ -204,7 +202,6 @@
} else {
media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
}
media_deps.trials = dependencies.trials.get();
dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
dependencies.call_factory = webrtc::CreateCallFactory();
dependencies.event_log_factory =

@@ -93,7 +93,6 @@ rtc_library("network_emulation_pc_unittest") {
"../../api:simulated_network_api",
"../../api/rtc_event_log:rtc_event_log_factory",
"../../api/task_queue:default_task_queue_factory",
"../../api/transport:field_trial_based_config",
"../../call:simulated_network",
"../../media:rtc_audio_video",
"../../media:rtc_media_engine_defaults",

@@ -16,7 +16,6 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "call/simulated_network.h"
#include "media/engine/webrtc_media_engine.h"
#include "media/engine/webrtc_media_engine_defaults.h"
@@ -60,7 +59,6 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
std::make_unique<RtcEventLogFactory>(pcf_deps.task_queue_factory.get());
pcf_deps.network_thread = network_thread;
pcf_deps.signaling_thread = signaling_thread;
pcf_deps.trials = std::make_unique<FieldTrialBasedConfig>();
cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
media_deps.adm = TestAudioDeviceModule::Create(
@@ -69,7 +67,6 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
kSamplingFrequency),
TestAudioDeviceModule::CreateDiscardRenderer(kSamplingFrequency),
/*speed=*/1.f);
media_deps.trials = pcf_deps.trials.get();
SetMediaEngineDefaults(&media_deps);
pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
return CreateModularPeerConnectionFactory(std::move(pcf_deps));

@@ -58,7 +58,6 @@ if (!build_with_chromium) {
"../../../api/rtc_event_log",
"../../../api/task_queue",
"../../../api/transport:network_control",
"../../../api/transport:webrtc_key_value_config",
"../../../api/video_codecs:video_codecs_api",
"../../../rtc_base",
]
@@ -270,7 +269,6 @@ if (!build_with_chromium) {
"../../../api:time_controller",
"../../../api/rtc_event_log:rtc_event_log_factory",
"../../../api/task_queue:default_task_queue_factory",
"../../../api/transport:field_trial_based_config",
"../../../api/video_codecs:builtin_video_decoder_factory",
"../../../api/video_codecs:builtin_video_encoder_factory",
"../../../media:rtc_audio_video",

@@ -21,7 +21,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "rtc_base/network.h"
@@ -53,8 +52,6 @@ struct PeerConnectionFactoryComponents {
// PeerConnectionFactory.
std::unique_ptr<VideoEncoderFactory> video_encoder_factory;
std::unique_ptr<VideoDecoderFactory> video_decoder_factory;

std::unique_ptr<WebRtcKeyValueConfig> trials;
};

// Contains most parts from PeerConnectionDependencies. Also all fields are

@@ -16,7 +16,6 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/create_time_controller.h"
#include "api/test/time_controller.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "media/engine/webrtc_media_engine.h"
@@ -65,10 +64,6 @@ void SetMandatoryEntities(InjectableComponents* components,
std::make_unique<RtcEventLogFactory>(
components->pcf_dependencies->task_queue_factory.get());
}
if (!components->pcf_dependencies->trials) {
components->pcf_dependencies->trials =
std::make_unique<FieldTrialBasedConfig>();
}
}

// Returns mapping from stream label to optional spatial index.
@@ -178,9 +173,6 @@ std::unique_ptr<cricket::MediaEngineInterface> CreateMediaEngine(
media_deps.video_decoder_factory =
std::move(pcf_dependencies->video_decoder_factory);
webrtc::SetMediaEngineDefaults(&media_deps);
RTC_DCHECK(pcf_dependencies->trials);
media_deps.trials = pcf_dependencies->trials.get();

return cricket::CreateMediaEngine(std::move(media_deps));
}

@@ -246,9 +238,6 @@ PeerConnectionFactoryDependencies CreatePCFDependencies(
if (pcf_dependencies->neteq_factory != nullptr) {
pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory);
}
if (pcf_dependencies->trials != nullptr) {
pcf_deps.trials = std::move(pcf_dependencies->trials);
}

return pcf_deps;
}

@@ -37,7 +37,6 @@ if (rtc_include_tests) {
"../../api/audio_codecs:builtin_audio_encoder_factory",
"../../api/rtc_event_log:rtc_event_log_factory",
"../../api/task_queue:default_task_queue_factory",
"../../api/transport:field_trial_based_config",
"../../api/video_codecs:builtin_video_decoder_factory",
"../../api/video_codecs:builtin_video_encoder_factory",
"../../media:rtc_audio_video",

@@ -18,7 +18,6 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/create_time_controller.h"
#include "api/transport/field_trial_based_config.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "media/engine/webrtc_media_engine.h"
@@ -198,7 +197,6 @@ PeerScenarioClient::PeerScenarioClient(
net->time_controller()->CreateTaskQueueFactory();
pcf_deps.event_log_factory =
std::make_unique<RtcEventLogFactory>(task_queue_factory_);
pcf_deps.trials = std::make_unique<FieldTrialBasedConfig>();

cricket::MediaEngineDependencies media_deps;
media_deps.task_queue_factory = task_queue_factory_;
@@ -223,7 +221,6 @@ PeerScenarioClient::PeerScenarioClient(
}
media_deps.audio_encoder_factory = CreateBuiltinAudioEncoderFactory();
media_deps.audio_decoder_factory = CreateBuiltinAudioDecoderFactory();
media_deps.trials = pcf_deps.trials.get();

pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
pcf_deps.fec_controller_factory = nullptr;