Reland "Prefix flag macros with WEBRTC_."

This is a reland of 5ccdc1331fcc3cd78eaa14408fe0c38d37a5a51d

Original change's description:
> Prefix flag macros with WEBRTC_.
>
> Macros defined in rtc_base/flags.h are intended to be used to define
> flags in WebRTC's binaries (e.g. tests).
>
> They are currently not prefixed and this could cause problems with
> downstream clients since these names are quite common.
>
> This CL adds the 'WEBRTC_' prefix to them.
>
> Generated with:
>
> for x in DECLARE DEFINE; do
>   for y in bool int float string FLAG; do
>     git grep -l "\b$x\_$y\b" | \
>     xargs sed -i "s/\b$x\_$y\b/WEBRTC_$x\_$y/g"
>   done
> done
> git cl format
>
> Bug: webrtc:9884
> Change-Id: I7b524762b6a3e5aa5b2fc2395edd3e1a0fe72591
> Reviewed-on: https://webrtc-review.googlesource.com/c/106682
> Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
> Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#25270}

TBR=kwiberg@webrtc.org

Bug: webrtc:9884
Change-Id: I5ba5368a231a334d135ed5e6fd7a279629ced8a3
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://webrtc-review.googlesource.com/c/107161
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25277}
This commit is contained in:
Mirko Bonadei 2018-10-18 11:35:32 +02:00 committed by Commit Bot
parent c538fc77b0
commit 2dfa998be2
44 changed files with 1098 additions and 937 deletions

View File

@@ -14,14 +14,15 @@
#include "system_wrappers/include/sleep.h"
#include "test/testsupport/fileutils.h"
DEFINE_int(sample_rate_hz,
16000,
"Sample rate (Hz) of the produced audio files.");
WEBRTC_DEFINE_int(sample_rate_hz,
16000,
"Sample rate (Hz) of the produced audio files.");
DEFINE_bool(quick,
false,
"Don't do the full audio recording. "
"Used to quickly check that the test runs without crashing.");
WEBRTC_DEFINE_bool(
quick,
false,
"Don't do the full audio recording. "
"Used to quickly check that the test runs without crashing.");
namespace webrtc {
namespace test {

View File

@@ -40,7 +40,9 @@ std::vector<uint32_t> GenerateSsrcs(size_t num_streams, uint32_t ssrc_offset) {
}
} // namespace
DEFINE_string(ramp_dump_name, "", "Filename for dumped received RTP stream.");
WEBRTC_DEFINE_string(ramp_dump_name,
"",
"Filename for dumped received RTP stream.");
RampUpTester::RampUpTester(size_t num_video_streams,
size_t num_audio_streams,

View File

@@ -19,16 +19,16 @@ extern const uint16_t kDefaultServerPort;  // From defaults.[h|cc]
// header file so that they can be shared across the different main.cc's
// for each platform.
DEFINE_bool(help, false, "Prints this message");
DEFINE_bool(autoconnect,
false,
"Connect to the server without user "
"intervention.");
DEFINE_string(server, "localhost", "The server to connect to.");
DEFINE_int(port,
kDefaultServerPort,
"The port on which the server is listening.");
DEFINE_bool(
WEBRTC_DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_bool(autoconnect,
false,
"Connect to the server without user "
"intervention.");
WEBRTC_DEFINE_string(server, "localhost", "The server to connect to.");
WEBRTC_DEFINE_int(port,
kDefaultServerPort,
"The port on which the server is listening.");
WEBRTC_DEFINE_bool(
autocall,
false,
"Call the first available other client on "

View File

@@ -33,16 +33,21 @@
using stunprober::StunProber;
using stunprober::AsyncCallback;
DEFINE_bool(help, false, "Prints this message");
DEFINE_int(interval, 10, "Interval of consecutive stun pings in milliseconds");
DEFINE_bool(shared_socket, false, "Share socket mode for different remote IPs");
DEFINE_int(pings_per_ip,
10,
"Number of consecutive stun pings to send for each IP");
DEFINE_int(timeout,
1000,
"Milliseconds of wait after the last ping sent before exiting");
DEFINE_string(
WEBRTC_DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_int(interval,
10,
"Interval of consecutive stun pings in milliseconds");
WEBRTC_DEFINE_bool(shared_socket,
false,
"Share socket mode for different remote IPs");
WEBRTC_DEFINE_int(pings_per_ip,
10,
"Number of consecutive stun pings to send for each IP");
WEBRTC_DEFINE_int(
timeout,
1000,
"Milliseconds of wait after the last ping sent before exiting");
WEBRTC_DEFINE_string(
servers,
"stun.l.google.com:19302,stun1.l.google.com:19302,stun2.l.google.com:19302",
"Comma separated STUN server addresses with ports");

View File

@@ -32,31 +32,32 @@ namespace {
using MediaType = webrtc::ParsedRtcEventLogNew::MediaType;
DEFINE_bool(
WEBRTC_DEFINE_bool(
audio,
true,
"Use --noaudio to exclude audio packets from the converted RTPdump file.");
DEFINE_bool(
WEBRTC_DEFINE_bool(
video,
true,
"Use --novideo to exclude video packets from the converted RTPdump file.");
DEFINE_bool(
WEBRTC_DEFINE_bool(
data,
true,
"Use --nodata to exclude data packets from the converted RTPdump file.");
DEFINE_bool(
WEBRTC_DEFINE_bool(
rtp,
true,
"Use --nortp to exclude RTP packets from the converted RTPdump file.");
DEFINE_bool(
WEBRTC_DEFINE_bool(
rtcp,
true,
"Use --nortcp to exclude RTCP packets from the converted RTPdump file.");
DEFINE_string(ssrc,
"",
"Store only packets with this SSRC (decimal or hex, the latter "
"starting with 0x).");
DEFINE_bool(help, false, "Prints this message.");
WEBRTC_DEFINE_string(
ssrc,
"",
"Store only packets with this SSRC (decimal or hex, the latter "
"starting with 0x).");
WEBRTC_DEFINE_bool(help, false, "Prints this message.");
// Parses the input string for a valid SSRC. If a valid SSRC is found, it is
// written to the output variable |ssrc|, and true is returned. Otherwise,

View File

@@ -36,7 +36,7 @@ RTC_POP_IGNORING_WUNDEF()
namespace {
DEFINE_bool(help, false, "Prints this message.");
WEBRTC_DEFINE_bool(help, false, "Prints this message.");
struct Stats {
int count = 0;

View File

@@ -42,35 +42,44 @@
namespace {
DEFINE_bool(unknown, true, "Use --nounknown to exclude unknown events.");
DEFINE_bool(startstop, true, "Use --nostartstop to exclude start/stop events.");
DEFINE_bool(config, true, "Use --noconfig to exclude stream configurations.");
DEFINE_bool(bwe, true, "Use --nobwe to exclude BWE events.");
DEFINE_bool(incoming, true, "Use --noincoming to exclude incoming packets.");
DEFINE_bool(outgoing, true, "Use --nooutgoing to exclude packets.");
WEBRTC_DEFINE_bool(unknown, true, "Use --nounknown to exclude unknown events.");
WEBRTC_DEFINE_bool(startstop,
true,
"Use --nostartstop to exclude start/stop events.");
WEBRTC_DEFINE_bool(config,
true,
"Use --noconfig to exclude stream configurations.");
WEBRTC_DEFINE_bool(bwe, true, "Use --nobwe to exclude BWE events.");
WEBRTC_DEFINE_bool(incoming,
true,
"Use --noincoming to exclude incoming packets.");
WEBRTC_DEFINE_bool(outgoing, true, "Use --nooutgoing to exclude packets.");
// TODO(terelius): Note that the media type doesn't work with outgoing packets.
DEFINE_bool(audio, true, "Use --noaudio to exclude audio packets.");
WEBRTC_DEFINE_bool(audio, true, "Use --noaudio to exclude audio packets.");
// TODO(terelius): Note that the media type doesn't work with outgoing packets.
DEFINE_bool(video, true, "Use --novideo to exclude video packets.");
WEBRTC_DEFINE_bool(video, true, "Use --novideo to exclude video packets.");
// TODO(terelius): Note that the media type doesn't work with outgoing packets.
DEFINE_bool(data, true, "Use --nodata to exclude data packets.");
DEFINE_bool(rtp, true, "Use --nortp to exclude RTP packets.");
DEFINE_bool(rtcp, true, "Use --nortcp to exclude RTCP packets.");
DEFINE_bool(playout, true, "Use --noplayout to exclude audio playout events.");
DEFINE_bool(ana, true, "Use --noana to exclude ANA events.");
DEFINE_bool(probe, true, "Use --noprobe to exclude probe events.");
DEFINE_bool(ice, true, "Use --noice to exclude ICE events.");
WEBRTC_DEFINE_bool(data, true, "Use --nodata to exclude data packets.");
WEBRTC_DEFINE_bool(rtp, true, "Use --nortp to exclude RTP packets.");
WEBRTC_DEFINE_bool(rtcp, true, "Use --nortcp to exclude RTCP packets.");
WEBRTC_DEFINE_bool(playout,
true,
"Use --noplayout to exclude audio playout events.");
WEBRTC_DEFINE_bool(ana, true, "Use --noana to exclude ANA events.");
WEBRTC_DEFINE_bool(probe, true, "Use --noprobe to exclude probe events.");
WEBRTC_DEFINE_bool(ice, true, "Use --noice to exclude ICE events.");
DEFINE_bool(print_full_packets,
false,
"Print the full RTP headers and RTCP packets in hex.");
WEBRTC_DEFINE_bool(print_full_packets,
false,
"Print the full RTP headers and RTCP packets in hex.");
// TODO(terelius): Allow a list of SSRCs.
DEFINE_string(ssrc,
"",
"Print only packets with this SSRC (decimal or hex, the latter "
"starting with 0x).");
DEFINE_bool(help, false, "Prints this message.");
WEBRTC_DEFINE_string(
ssrc,
"",
"Print only packets with this SSRC (decimal or hex, the latter "
"starting with 0x).");
WEBRTC_DEFINE_bool(help, false, "Prints this message.");
using MediaType = webrtc::ParsedRtcEventLogNew::MediaType;

View File

@@ -52,7 +52,7 @@ RTC_PUSH_IGNORING_WUNDEF()
RTC_POP_IGNORING_WUNDEF()
#endif
DEFINE_bool(gen_ref, false, "Generate reference files.");
WEBRTC_DEFINE_bool(gen_ref, false, "Generate reference files.");
namespace webrtc {

View File

@@ -25,7 +25,7 @@ namespace {
static const int kInputSampleRateKhz = 8;
static const int kOutputSampleRateKhz = 8;
DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
WEBRTC_DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
} // namespace

View File

@@ -21,7 +21,7 @@ static const int kIsacBlockDurationMs = 30;
static const int kIsacInputSamplingKhz = 16;
static const int kIsacOutputSamplingKhz = 16;
DEFINE_int(bit_rate_kbps, 32, "Target bit rate (kbps).");
WEBRTC_DEFINE_int(bit_rate_kbps, 32, "Target bit rate (kbps).");
} // namespace

View File

@@ -22,24 +22,26 @@ namespace {
static const int kOpusBlockDurationMs = 20;
static const int kOpusSamplingKhz = 48;
DEFINE_int(bit_rate_kbps, 32, "Target bit rate (kbps).");
WEBRTC_DEFINE_int(bit_rate_kbps, 32, "Target bit rate (kbps).");
DEFINE_int(complexity,
10,
"Complexity: 0 ~ 10 -- defined as in Opus"
"specification.");
WEBRTC_DEFINE_int(complexity,
10,
"Complexity: 0 ~ 10 -- defined as in Opus"
"specification.");
DEFINE_int(maxplaybackrate, 48000, "Maximum playback rate (Hz).");
WEBRTC_DEFINE_int(maxplaybackrate, 48000, "Maximum playback rate (Hz).");
DEFINE_int(application, 0, "Application mode: 0 -- VOIP, 1 -- Audio.");
WEBRTC_DEFINE_int(application, 0, "Application mode: 0 -- VOIP, 1 -- Audio.");
DEFINE_int(reported_loss_rate, 10, "Reported percentile of packet loss.");
WEBRTC_DEFINE_int(reported_loss_rate,
10,
"Reported percentile of packet loss.");
DEFINE_bool(fec, false, "Enable FEC for encoding (-nofec to disable).");
WEBRTC_DEFINE_bool(fec, false, "Enable FEC for encoding (-nofec to disable).");
DEFINE_bool(dtx, false, "Enable DTX for encoding (-nodtx to disable).");
WEBRTC_DEFINE_bool(dtx, false, "Enable DTX for encoding (-nodtx to disable).");
DEFINE_int(sub_packets, 1, "Number of sub packets to repacketize.");
WEBRTC_DEFINE_int(sub_packets, 1, "Number of sub packets to repacketize.");
} // namespace

View File

@@ -26,7 +26,7 @@ namespace {
static const int kInputSampleRateKhz = 48;
static const int kOutputSampleRateKhz = 48;
DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
WEBRTC_DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
} // namespace

View File

@@ -25,7 +25,7 @@ namespace {
static const int kInputSampleRateKhz = 8;
static const int kOutputSampleRateKhz = 8;
DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
WEBRTC_DEFINE_int(frame_size_ms, 20, "Codec frame size (milliseconds).");
} // namespace

View File

@@ -16,10 +16,10 @@
#include "rtc_base/flags.h"
// Define command line flags.
DEFINE_int(runtime_ms, 10000, "Simulated runtime in ms.");
DEFINE_int(lossrate, 10, "Packet lossrate; drop every N packets.");
DEFINE_float(drift, 0.1f, "Clockdrift factor.");
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_int(runtime_ms, 10000, "Simulated runtime in ms.");
WEBRTC_DEFINE_int(lossrate, 10, "Packet lossrate; drop every N packets.");
WEBRTC_DEFINE_float(drift, 0.1f, "Clockdrift factor.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
int main(int argc, char* argv[]) {
std::string program_name = argv[0];

View File

@@ -47,42 +47,47 @@ static bool ValidateFilename(const std::string& value, bool write) {
return true;
}
DEFINE_string(
WEBRTC_DEFINE_string(
in_filename,
DefaultInFilename().c_str(),
"Filename for input audio (specify sample rate with --input_sample_rate, "
"and channels with --channels).");
DEFINE_int(input_sample_rate, 16000, "Sample rate of input file in Hz.");
WEBRTC_DEFINE_int(input_sample_rate, 16000, "Sample rate of input file in Hz.");
DEFINE_int(channels, 1, "Number of channels in input audio.");
WEBRTC_DEFINE_int(channels, 1, "Number of channels in input audio.");
DEFINE_string(out_filename,
DefaultOutFilename().c_str(),
"Name of output audio file.");
WEBRTC_DEFINE_string(out_filename,
DefaultOutFilename().c_str(),
"Name of output audio file.");
DEFINE_int(runtime_ms, 10000, "Simulated runtime (milliseconds).");
WEBRTC_DEFINE_int(runtime_ms, 10000, "Simulated runtime (milliseconds).");
DEFINE_int(packet_loss_rate, 10, "Percentile of packet loss.");
WEBRTC_DEFINE_int(packet_loss_rate, 10, "Percentile of packet loss.");
DEFINE_int(random_loss_mode,
kUniformLoss,
"Random loss mode: 0--no loss, 1--uniform loss, 2--Gilbert Elliot "
"loss, 3--fixed loss.");
WEBRTC_DEFINE_int(
random_loss_mode,
kUniformLoss,
"Random loss mode: 0--no loss, 1--uniform loss, 2--Gilbert Elliot "
"loss, 3--fixed loss.");
DEFINE_int(burst_length,
30,
"Burst length in milliseconds, only valid for Gilbert Elliot loss.");
WEBRTC_DEFINE_int(
burst_length,
30,
"Burst length in milliseconds, only valid for Gilbert Elliot loss.");
DEFINE_float(drift_factor, 0.0, "Time drift factor.");
WEBRTC_DEFINE_float(drift_factor, 0.0, "Time drift factor.");
DEFINE_int(preload_packets, 0, "Preload the buffer with this many packets.");
WEBRTC_DEFINE_int(preload_packets,
0,
"Preload the buffer with this many packets.");
DEFINE_string(loss_events,
"",
"List of loss events time and duration separated by comma: "
"<first_event_time> <first_event_duration>, <second_event_time> "
"<second_event_duration>, ...");
WEBRTC_DEFINE_string(
loss_events,
"",
"List of loss events time and duration separated by comma: "
"<first_event_time> <first_event_duration>, <second_event_time> "
"<second_event_duration>, ...");
// ProbTrans00Solver() is to calculate the transition probability from no-loss
// state to itself in a modified Gilbert Elliot packet loss model. The result is

View File

@@ -17,17 +17,17 @@
#include "system_wrappers/include/field_trial.h"
#include "test/field_trial.h"
DEFINE_bool(codec_map,
false,
"Prints the mapping between RTP payload type and "
"codec");
DEFINE_string(
WEBRTC_DEFINE_bool(codec_map,
false,
"Prints the mapping between RTP payload type and "
"codec");
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature.");
DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_bool(help, false, "Prints this message");
int main(int argc, char* argv[]) {
webrtc::test::NetEqTestFactory factory;

View File

@@ -91,50 +91,57 @@ static bool ValidateExtensionId(int value) {
}
// Define command line flags.
DEFINE_int(pcmu, 0, "RTP payload type for PCM-u");
DEFINE_int(pcma, 8, "RTP payload type for PCM-a");
DEFINE_int(ilbc, 102, "RTP payload type for iLBC");
DEFINE_int(isac, 103, "RTP payload type for iSAC");
DEFINE_int(isac_swb, 104, "RTP payload type for iSAC-swb (32 kHz)");
DEFINE_int(opus, 111, "RTP payload type for Opus");
DEFINE_int(pcm16b, 93, "RTP payload type for PCM16b-nb (8 kHz)");
DEFINE_int(pcm16b_wb, 94, "RTP payload type for PCM16b-wb (16 kHz)");
DEFINE_int(pcm16b_swb32, 95, "RTP payload type for PCM16b-swb32 (32 kHz)");
DEFINE_int(pcm16b_swb48, 96, "RTP payload type for PCM16b-swb48 (48 kHz)");
DEFINE_int(g722, 9, "RTP payload type for G.722");
DEFINE_int(avt, 106, "RTP payload type for AVT/DTMF (8 kHz)");
DEFINE_int(avt_16, 114, "RTP payload type for AVT/DTMF (16 kHz)");
DEFINE_int(avt_32, 115, "RTP payload type for AVT/DTMF (32 kHz)");
DEFINE_int(avt_48, 116, "RTP payload type for AVT/DTMF (48 kHz)");
DEFINE_int(red, 117, "RTP payload type for redundant audio (RED)");
DEFINE_int(cn_nb, 13, "RTP payload type for comfort noise (8 kHz)");
DEFINE_int(cn_wb, 98, "RTP payload type for comfort noise (16 kHz)");
DEFINE_int(cn_swb32, 99, "RTP payload type for comfort noise (32 kHz)");
DEFINE_int(cn_swb48, 100, "RTP payload type for comfort noise (48 kHz)");
DEFINE_string(replacement_audio_file,
"",
"A PCM file that will be used to populate "
"dummy"
" RTP packets");
DEFINE_string(ssrc,
"",
"Only use packets with this SSRC (decimal or hex, the latter "
"starting with 0x)");
DEFINE_int(audio_level, 1, "Extension ID for audio level (RFC 6464)");
DEFINE_int(abs_send_time, 3, "Extension ID for absolute sender time");
DEFINE_int(transport_seq_no, 5, "Extension ID for transport sequence number");
DEFINE_int(video_content_type, 7, "Extension ID for video content type");
DEFINE_int(video_timing, 8, "Extension ID for video timing");
DEFINE_bool(matlabplot,
false,
"Generates a matlab script for plotting the delay profile");
DEFINE_bool(pythonplot,
false,
"Generates a python script for plotting the delay profile");
DEFINE_bool(concealment_events, false, "Prints concealment events");
DEFINE_int(max_nr_packets_in_buffer,
50,
"Maximum allowed number of packets in the buffer");
WEBRTC_DEFINE_int(pcmu, 0, "RTP payload type for PCM-u");
WEBRTC_DEFINE_int(pcma, 8, "RTP payload type for PCM-a");
WEBRTC_DEFINE_int(ilbc, 102, "RTP payload type for iLBC");
WEBRTC_DEFINE_int(isac, 103, "RTP payload type for iSAC");
WEBRTC_DEFINE_int(isac_swb, 104, "RTP payload type for iSAC-swb (32 kHz)");
WEBRTC_DEFINE_int(opus, 111, "RTP payload type for Opus");
WEBRTC_DEFINE_int(pcm16b, 93, "RTP payload type for PCM16b-nb (8 kHz)");
WEBRTC_DEFINE_int(pcm16b_wb, 94, "RTP payload type for PCM16b-wb (16 kHz)");
WEBRTC_DEFINE_int(pcm16b_swb32,
95,
"RTP payload type for PCM16b-swb32 (32 kHz)");
WEBRTC_DEFINE_int(pcm16b_swb48,
96,
"RTP payload type for PCM16b-swb48 (48 kHz)");
WEBRTC_DEFINE_int(g722, 9, "RTP payload type for G.722");
WEBRTC_DEFINE_int(avt, 106, "RTP payload type for AVT/DTMF (8 kHz)");
WEBRTC_DEFINE_int(avt_16, 114, "RTP payload type for AVT/DTMF (16 kHz)");
WEBRTC_DEFINE_int(avt_32, 115, "RTP payload type for AVT/DTMF (32 kHz)");
WEBRTC_DEFINE_int(avt_48, 116, "RTP payload type for AVT/DTMF (48 kHz)");
WEBRTC_DEFINE_int(red, 117, "RTP payload type for redundant audio (RED)");
WEBRTC_DEFINE_int(cn_nb, 13, "RTP payload type for comfort noise (8 kHz)");
WEBRTC_DEFINE_int(cn_wb, 98, "RTP payload type for comfort noise (16 kHz)");
WEBRTC_DEFINE_int(cn_swb32, 99, "RTP payload type for comfort noise (32 kHz)");
WEBRTC_DEFINE_int(cn_swb48, 100, "RTP payload type for comfort noise (48 kHz)");
WEBRTC_DEFINE_string(replacement_audio_file,
"",
"A PCM file that will be used to populate "
"dummy"
" RTP packets");
WEBRTC_DEFINE_string(
ssrc,
"",
"Only use packets with this SSRC (decimal or hex, the latter "
"starting with 0x)");
WEBRTC_DEFINE_int(audio_level, 1, "Extension ID for audio level (RFC 6464)");
WEBRTC_DEFINE_int(abs_send_time, 3, "Extension ID for absolute sender time");
WEBRTC_DEFINE_int(transport_seq_no,
5,
"Extension ID for transport sequence number");
WEBRTC_DEFINE_int(video_content_type, 7, "Extension ID for video content type");
WEBRTC_DEFINE_int(video_timing, 8, "Extension ID for video timing");
WEBRTC_DEFINE_bool(matlabplot,
false,
"Generates a matlab script for plotting the delay profile");
WEBRTC_DEFINE_bool(pythonplot,
false,
"Generates a python script for plotting the delay profile");
WEBRTC_DEFINE_bool(concealment_events, false, "Prints concealment events");
WEBRTC_DEFINE_int(max_nr_packets_in_buffer,
50,
"Maximum allowed number of packets in the buffer");
// Maps a codec type to a printable name string.
std::string CodecName(NetEqDecoder codec) {

View File

@@ -19,16 +19,16 @@
#include "rtc_base/flags.h"
// Define command line flags.
DEFINE_int(red, 117, "RTP payload type for RED");
DEFINE_int(audio_level,
-1,
"Extension ID for audio level (RFC 6464); "
"-1 not to print audio level");
DEFINE_int(abs_send_time,
-1,
"Extension ID for absolute sender time; "
"-1 not to print absolute send time");
DEFINE_bool(help, false, "Print this message");
WEBRTC_DEFINE_int(red, 117, "RTP payload type for RED");
WEBRTC_DEFINE_int(audio_level,
-1,
"Extension ID for audio level (RFC 6464); "
"-1 not to print audio level");
WEBRTC_DEFINE_int(abs_send_time,
-1,
"Extension ID for absolute sender time; "
"-1 not to print absolute send time");
WEBRTC_DEFINE_bool(help, false, "Print this message");
int main(int argc, char* argv[]) {
std::string program_name = argv[0];

View File

@@ -40,20 +40,24 @@ namespace test {
namespace {
// Define command line flags.
DEFINE_bool(list_codecs, false, "Enumerate all codecs");
DEFINE_string(codec, "opus", "Codec to use");
DEFINE_int(frame_len, 0, "Frame length in ms; 0 indicates codec default value");
DEFINE_int(bitrate, 0, "Bitrate in kbps; 0 indicates codec default value");
DEFINE_int(payload_type,
-1,
"RTP payload type; -1 indicates codec default value");
DEFINE_int(cng_payload_type,
-1,
"RTP payload type for CNG; -1 indicates default value");
DEFINE_int(ssrc, 0, "SSRC to write to the RTP header");
DEFINE_bool(dtx, false, "Use DTX/CNG");
DEFINE_int(sample_rate, 48000, "Sample rate of the input file");
DEFINE_bool(help, false, "Print this message");
WEBRTC_DEFINE_bool(list_codecs, false, "Enumerate all codecs");
WEBRTC_DEFINE_string(codec, "opus", "Codec to use");
WEBRTC_DEFINE_int(frame_len,
0,
"Frame length in ms; 0 indicates codec default value");
WEBRTC_DEFINE_int(bitrate,
0,
"Bitrate in kbps; 0 indicates codec default value");
WEBRTC_DEFINE_int(payload_type,
-1,
"RTP payload type; -1 indicates codec default value");
WEBRTC_DEFINE_int(cng_payload_type,
-1,
"RTP payload type for CNG; -1 indicates default value");
WEBRTC_DEFINE_int(ssrc, 0, "SSRC to write to the RTP header");
WEBRTC_DEFINE_bool(dtx, false, "Use DTX/CNG");
WEBRTC_DEFINE_int(sample_rate, 48000, "Sample rate of the input file");
WEBRTC_DEFINE_bool(help, false, "Print this message");
// Add new codecs here, and to the map below.
enum class CodecType {

View File

@@ -23,7 +23,7 @@ namespace webrtc {
namespace test {
namespace {
DEFINE_bool(help, false, "Print help message");
WEBRTC_DEFINE_bool(help, false, "Print help message");
constexpr size_t kRtpDumpHeaderLength = 8;

View File

@@ -19,24 +19,25 @@
#include "rtc_base/flags.h"
#include "rtc_base/strings/string_builder.h"
DEFINE_bool(help, false, "Prints this message");
DEFINE_int(sampling_rate,
16000,
"Rate at which to mix (all input streams must have this rate)");
WEBRTC_DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_int(
sampling_rate,
16000,
"Rate at which to mix (all input streams must have this rate)");
DEFINE_bool(
WEBRTC_DEFINE_bool(
stereo,
false,
"Enable stereo (interleaved). Inputs need not be as this parameter.");
DEFINE_bool(limiter, true, "Enable limiter.");
DEFINE_string(output_file,
"mixed_file.wav",
"File in which to store the mixed result.");
DEFINE_string(input_file_1, "", "First input. Default none.");
DEFINE_string(input_file_2, "", "Second input. Default none.");
DEFINE_string(input_file_3, "", "Third input. Default none.");
DEFINE_string(input_file_4, "", "Fourth input. Default none.");
WEBRTC_DEFINE_bool(limiter, true, "Enable limiter.");
WEBRTC_DEFINE_string(output_file,
"mixed_file.wav",
"File in which to store the mixed result.");
WEBRTC_DEFINE_string(input_file_1, "", "First input. Default none.");
WEBRTC_DEFINE_string(input_file_2, "", "Second input. Default none.");
WEBRTC_DEFINE_string(input_file_3, "", "Third input. Default none.");
WEBRTC_DEFINE_string(input_file_4, "", "Fourth input. Default none.");
namespace webrtc {
namespace test {

View File

@@ -25,22 +25,22 @@ namespace rnn_vad {
namespace test {
namespace {
DEFINE_string(i, "", "Path to the input wav file");
WEBRTC_DEFINE_string(i, "", "Path to the input wav file");
std::string InputWavFile() {
return static_cast<std::string>(FLAG_i);
}
DEFINE_string(f, "", "Path to the output features file");
WEBRTC_DEFINE_string(f, "", "Path to the output features file");
std::string OutputFeaturesFile() {
return static_cast<std::string>(FLAG_f);
}
DEFINE_string(o, "", "Path to the output VAD probabilities file");
WEBRTC_DEFINE_string(o, "", "Path to the output VAD probabilities file");
std::string OutputVadProbsFile() {
return static_cast<std::string>(FLAG_o);
}
DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_bool(help, false, "Prints this message");
} // namespace

View File

@@ -37,155 +37,170 @@ const char kUsageDescription[] =
"processing module, either based on wav files or "
"protobuf debug dump recordings.\n";
DEFINE_string(dump_input, "", "Aec dump input filename");
DEFINE_string(dump_output, "", "Aec dump output filename");
DEFINE_string(i, "", "Forward stream input wav filename");
DEFINE_string(o, "", "Forward stream output wav filename");
DEFINE_string(ri, "", "Reverse stream input wav filename");
DEFINE_string(ro, "", "Reverse stream output wav filename");
DEFINE_string(artificial_nearend, "", "Artificial nearend wav filename");
DEFINE_int(output_num_channels,
kParameterNotSpecifiedValue,
"Number of forward stream output channels");
DEFINE_int(reverse_output_num_channels,
kParameterNotSpecifiedValue,
"Number of Reverse stream output channels");
DEFINE_int(output_sample_rate_hz,
kParameterNotSpecifiedValue,
"Forward stream output sample rate in Hz");
DEFINE_int(reverse_output_sample_rate_hz,
kParameterNotSpecifiedValue,
"Reverse stream output sample rate in Hz");
DEFINE_bool(fixed_interface,
false,
"Use the fixed interface when operating on wav files");
DEFINE_int(aec,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the echo canceller");
DEFINE_int(aecm,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the mobile echo controller");
DEFINE_int(ed,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate (0) the residual echo detector");
DEFINE_string(ed_graph, "", "Output filename for graph of echo likelihood");
DEFINE_int(agc,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC");
DEFINE_int(agc2,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC2");
DEFINE_int(pre_amplifier,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the pre amplifier");
DEFINE_int(hpf,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the high-pass filter");
DEFINE_int(ns,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the noise suppressor");
DEFINE_int(ts,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the transient suppressor");
DEFINE_int(vad,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the voice activity detector");
DEFINE_int(le,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the level estimator");
DEFINE_bool(all_default,
false,
"Activate all of the default components (will be overridden by any "
"other settings)");
DEFINE_int(aec_suppression_level,
kParameterNotSpecifiedValue,
"Set the aec suppression level (0-2)");
DEFINE_int(delay_agnostic,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AEC delay agnostic mode");
DEFINE_int(extended_filter,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AEC extended filter mode");
DEFINE_int(aec3,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the experimental AEC mode AEC3");
DEFINE_int(experimental_agc,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the experimental AGC");
DEFINE_int(experimental_agc_disable_digital_adaptive,
kParameterNotSpecifiedValue,
"Force-deactivate (1) digital adaptation in "
"experimental AGC. Digital adaptation is active by default (0).");
DEFINE_int(experimental_agc_analyze_before_aec,
kParameterNotSpecifiedValue,
"Make level estimation happen before AEC"
" in the experimental AGC. After AEC is the default (0)");
DEFINE_int(
WEBRTC_DEFINE_string(dump_input, "", "Aec dump input filename");
WEBRTC_DEFINE_string(dump_output, "", "Aec dump output filename");
WEBRTC_DEFINE_string(i, "", "Forward stream input wav filename");
WEBRTC_DEFINE_string(o, "", "Forward stream output wav filename");
WEBRTC_DEFINE_string(ri, "", "Reverse stream input wav filename");
WEBRTC_DEFINE_string(ro, "", "Reverse stream output wav filename");
WEBRTC_DEFINE_string(artificial_nearend, "", "Artificial nearend wav filename");
WEBRTC_DEFINE_int(output_num_channels,
kParameterNotSpecifiedValue,
"Number of forward stream output channels");
WEBRTC_DEFINE_int(reverse_output_num_channels,
kParameterNotSpecifiedValue,
"Number of Reverse stream output channels");
WEBRTC_DEFINE_int(output_sample_rate_hz,
kParameterNotSpecifiedValue,
"Forward stream output sample rate in Hz");
WEBRTC_DEFINE_int(reverse_output_sample_rate_hz,
kParameterNotSpecifiedValue,
"Reverse stream output sample rate in Hz");
WEBRTC_DEFINE_bool(fixed_interface,
false,
"Use the fixed interface when operating on wav files");
WEBRTC_DEFINE_int(aec,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the echo canceller");
WEBRTC_DEFINE_int(aecm,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the mobile echo controller");
WEBRTC_DEFINE_int(ed,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate (0) the residual echo detector");
WEBRTC_DEFINE_string(ed_graph,
"",
"Output filename for graph of echo likelihood");
WEBRTC_DEFINE_int(agc,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC");
WEBRTC_DEFINE_int(agc2,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC2");
WEBRTC_DEFINE_int(pre_amplifier,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the pre amplifier");
WEBRTC_DEFINE_int(hpf,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the high-pass filter");
WEBRTC_DEFINE_int(ns,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the noise suppressor");
WEBRTC_DEFINE_int(ts,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the transient suppressor");
WEBRTC_DEFINE_int(vad,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the voice activity detector");
WEBRTC_DEFINE_int(le,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the level estimator");
WEBRTC_DEFINE_bool(
all_default,
false,
"Activate all of the default components (will be overridden by any "
"other settings)");
WEBRTC_DEFINE_int(aec_suppression_level,
kParameterNotSpecifiedValue,
"Set the aec suppression level (0-2)");
WEBRTC_DEFINE_int(delay_agnostic,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AEC delay agnostic mode");
WEBRTC_DEFINE_int(extended_filter,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AEC extended filter mode");
WEBRTC_DEFINE_int(
aec3,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the experimental AEC mode AEC3");
WEBRTC_DEFINE_int(experimental_agc,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the experimental AGC");
WEBRTC_DEFINE_int(
experimental_agc_disable_digital_adaptive,
kParameterNotSpecifiedValue,
"Force-deactivate (1) digital adaptation in "
"experimental AGC. Digital adaptation is active by default (0).");
WEBRTC_DEFINE_int(experimental_agc_analyze_before_aec,
kParameterNotSpecifiedValue,
"Make level estimation happen before AEC"
" in the experimental AGC. After AEC is the default (0)");
WEBRTC_DEFINE_int(
experimental_agc_agc2_level_estimator,
kParameterNotSpecifiedValue,
"AGC2 level estimation"
" in the experimental AGC. AGC1 level estimation is the default (0)");
DEFINE_int(
WEBRTC_DEFINE_int(
refined_adaptive_filter,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the refined adaptive filter functionality");
DEFINE_int(agc_mode, kParameterNotSpecifiedValue, "Specify the AGC mode (0-2)");
DEFINE_int(agc_target_level,
kParameterNotSpecifiedValue,
"Specify the AGC target level (0-31)");
DEFINE_int(agc_limiter,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the level estimator");
DEFINE_int(agc_compression_gain,
kParameterNotSpecifiedValue,
"Specify the AGC compression gain (0-90)");
DEFINE_float(agc2_enable_adaptive_gain,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC2 adaptive gain");
DEFINE_float(agc2_fixed_gain_db, 0.f, "AGC2 fixed gain (dB) to apply");
DEFINE_float(pre_amplifier_gain_factor,
1.f,
"Pre-amplifier gain factor (linear) to apply");
DEFINE_int(vad_likelihood,
kParameterNotSpecifiedValue,
"Specify the VAD likelihood (0-3)");
DEFINE_int(ns_level, kParameterNotSpecifiedValue, "Specify the NS level (0-3)");
DEFINE_int(stream_delay,
kParameterNotSpecifiedValue,
"Specify the stream delay in ms to use");
DEFINE_int(use_stream_delay,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) reporting the stream delay");
DEFINE_int(stream_drift_samples,
kParameterNotSpecifiedValue,
"Specify the number of stream drift samples to use");
DEFINE_int(initial_mic_level, 100, "Initial mic level (0-255)");
DEFINE_int(simulate_mic_gain,
0,
"Activate (1) or deactivate(0) the analog mic gain simulation");
DEFINE_int(simulated_mic_kind,
kParameterNotSpecifiedValue,
"Specify which microphone kind to use for microphone simulation");
DEFINE_bool(performance_report, false, "Report the APM performance ");
DEFINE_bool(verbose, false, "Produce verbose output");
DEFINE_bool(quiet, false, "Avoid producing information about the progress.");
DEFINE_bool(bitexactness_report,
false,
"Report bitexactness for aec dump result reproduction");
DEFINE_bool(discard_settings_in_aecdump,
false,
"Discard any config settings specified in the aec dump");
DEFINE_bool(store_intermediate_output,
false,
"Creates new output files after each init");
DEFINE_string(custom_call_order_file, "", "Custom process API call order file");
DEFINE_bool(print_aec3_parameter_values,
false,
"Print parameter values used in AEC3 in JSON-format");
DEFINE_string(aec3_settings,
"",
"File in JSON-format with custom AEC3 settings");
DEFINE_bool(help, false, "Print this message");
WEBRTC_DEFINE_int(agc_mode,
kParameterNotSpecifiedValue,
"Specify the AGC mode (0-2)");
WEBRTC_DEFINE_int(agc_target_level,
kParameterNotSpecifiedValue,
"Specify the AGC target level (0-31)");
WEBRTC_DEFINE_int(agc_limiter,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the level estimator");
WEBRTC_DEFINE_int(agc_compression_gain,
kParameterNotSpecifiedValue,
"Specify the AGC compression gain (0-90)");
WEBRTC_DEFINE_float(agc2_enable_adaptive_gain,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) the AGC2 adaptive gain");
WEBRTC_DEFINE_float(agc2_fixed_gain_db, 0.f, "AGC2 fixed gain (dB) to apply");
WEBRTC_DEFINE_float(pre_amplifier_gain_factor,
1.f,
"Pre-amplifier gain factor (linear) to apply");
WEBRTC_DEFINE_int(vad_likelihood,
kParameterNotSpecifiedValue,
"Specify the VAD likelihood (0-3)");
WEBRTC_DEFINE_int(ns_level,
kParameterNotSpecifiedValue,
"Specify the NS level (0-3)");
WEBRTC_DEFINE_int(stream_delay,
kParameterNotSpecifiedValue,
"Specify the stream delay in ms to use");
WEBRTC_DEFINE_int(use_stream_delay,
kParameterNotSpecifiedValue,
"Activate (1) or deactivate(0) reporting the stream delay");
WEBRTC_DEFINE_int(stream_drift_samples,
kParameterNotSpecifiedValue,
"Specify the number of stream drift samples to use");
WEBRTC_DEFINE_int(initial_mic_level, 100, "Initial mic level (0-255)");
WEBRTC_DEFINE_int(
simulate_mic_gain,
0,
"Activate (1) or deactivate(0) the analog mic gain simulation");
WEBRTC_DEFINE_int(
simulated_mic_kind,
kParameterNotSpecifiedValue,
"Specify which microphone kind to use for microphone simulation");
WEBRTC_DEFINE_bool(performance_report, false, "Report the APM performance ");
WEBRTC_DEFINE_bool(verbose, false, "Produce verbose output");
WEBRTC_DEFINE_bool(quiet,
false,
"Avoid producing information about the progress.");
WEBRTC_DEFINE_bool(bitexactness_report,
false,
"Report bitexactness for aec dump result reproduction");
WEBRTC_DEFINE_bool(discard_settings_in_aecdump,
false,
"Discard any config settings specified in the aec dump");
WEBRTC_DEFINE_bool(store_intermediate_output,
false,
"Creates new output files after each init");
WEBRTC_DEFINE_string(custom_call_order_file,
"",
"Custom process API call order file");
WEBRTC_DEFINE_bool(print_aec3_parameter_values,
false,
"Print parameter values used in AEC3 in JSON-format");
WEBRTC_DEFINE_string(aec3_settings,
"",
"File in JSON-format with custom AEC3 settings");
WEBRTC_DEFINE_bool(help, false, "Print this message");
void SetSettingIfSpecified(const std::string& value,
absl::optional<std::string>* parameter) {

View File

@ -32,10 +32,10 @@ const char kUsageDescription[] =
"Command-line tool to generate multiple-end audio tracks to simulate "
"conversational speech with two or more participants.\n";
DEFINE_string(i, "", "Directory containing the speech turn wav files");
DEFINE_string(t, "", "Path to the timing text file");
DEFINE_string(o, "", "Output wav files destination path");
DEFINE_bool(help, false, "Prints this message");
WEBRTC_DEFINE_string(i, "", "Directory containing the speech turn wav files");
WEBRTC_DEFINE_string(t, "", "Path to the timing text file");
WEBRTC_DEFINE_string(o, "", "Output wav files destination path");
WEBRTC_DEFINE_bool(help, false, "Prints this message");
} // namespace

View File

@ -24,9 +24,9 @@ constexpr int kMaxSampleRate = 48000;
constexpr size_t kMaxFrameLen =
kAudioFrameLengthMilliseconds * kMaxSampleRate / 1000;
DEFINE_string(i, "", "Input wav file");
DEFINE_string(o_probs, "", "VAD probabilities output file");
DEFINE_string(o_rms, "", "VAD output file");
WEBRTC_DEFINE_string(i, "", "Input wav file");
WEBRTC_DEFINE_string(o_probs, "", "VAD probabilities output file");
WEBRTC_DEFINE_string(o_rms, "", "VAD output file");
int main(int argc, char* argv[]) {
if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true))

View File

@ -26,13 +26,13 @@ constexpr size_t kMaxFrameLen = kMaxFrameLenMs * kMaxSampleRate / 1000;
const double kOneDbReduction = DbToRatio(-1.0);
DEFINE_string(i, "", "Input wav file");
DEFINE_string(oc, "", "Config output file");
DEFINE_string(ol, "", "Levels output file");
DEFINE_float(a, 5.f, "Attack (ms)");
DEFINE_float(d, 20.f, "Decay (ms)");
DEFINE_int(f, 10, "Frame length (ms)");
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_string(i, "", "Input wav file");
WEBRTC_DEFINE_string(oc, "", "Config output file");
WEBRTC_DEFINE_string(ol, "", "Levels output file");
WEBRTC_DEFINE_float(a, 5.f, "Attack (ms)");
WEBRTC_DEFINE_float(d, 20.f, "Decay (ms)");
WEBRTC_DEFINE_int(f, 10, "Frame length (ms)");
WEBRTC_DEFINE_bool(help, false, "prints this message");
int main(int argc, char* argv[]) {
if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true)) {

View File

@ -27,8 +27,8 @@ constexpr size_t kMaxFrameLen =
constexpr uint8_t kBitmaskBuffSize = 8;
DEFINE_string(i, "", "Input wav file");
DEFINE_string(o, "", "VAD output file");
WEBRTC_DEFINE_string(i, "", "Input wav file");
WEBRTC_DEFINE_string(o, "", "VAD output file");
int main(int argc, char* argv[]) {
if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true))

View File

@ -23,26 +23,28 @@
#include "test/gtest.h"
#include "test/testsupport/fileutils.h"
DEFINE_string(in_file_name, "", "PCM file that contains the signal.");
DEFINE_string(detection_file_name,
"",
"PCM file that contains the detection signal.");
DEFINE_string(reference_file_name,
"",
"PCM file that contains the reference signal.");
WEBRTC_DEFINE_string(in_file_name, "", "PCM file that contains the signal.");
WEBRTC_DEFINE_string(detection_file_name,
"",
"PCM file that contains the detection signal.");
WEBRTC_DEFINE_string(reference_file_name,
"",
"PCM file that contains the reference signal.");
DEFINE_int(chunk_size_ms,
10,
"Time between each chunk of samples in milliseconds.");
WEBRTC_DEFINE_int(chunk_size_ms,
10,
"Time between each chunk of samples in milliseconds.");
DEFINE_int(sample_rate_hz, 16000, "Sampling frequency of the signal in Hertz.");
DEFINE_int(detection_rate_hz,
0,
"Sampling frequency of the detection signal in Hertz.");
WEBRTC_DEFINE_int(sample_rate_hz,
16000,
"Sampling frequency of the signal in Hertz.");
WEBRTC_DEFINE_int(detection_rate_hz,
0,
"Sampling frequency of the detection signal in Hertz.");
DEFINE_int(num_channels, 1, "Number of channels.");
WEBRTC_DEFINE_int(num_channels, 1, "Number of channels.");
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
namespace webrtc {

View File

@ -24,28 +24,30 @@
namespace flags {
DEFINE_string(extension_type,
"abs",
"Extension type, either abs for absolute send time or tsoffset "
"for timestamp offset.");
WEBRTC_DEFINE_string(
extension_type,
"abs",
"Extension type, either abs for absolute send time or tsoffset "
"for timestamp offset.");
std::string ExtensionType() {
return static_cast<std::string>(FLAG_extension_type);
}
DEFINE_int(extension_id, 3, "Extension id.");
WEBRTC_DEFINE_int(extension_id, 3, "Extension id.");
int ExtensionId() {
return static_cast<int>(FLAG_extension_id);
}
DEFINE_string(input_file, "", "Input file.");
WEBRTC_DEFINE_string(input_file, "", "Input file.");
std::string InputFile() {
return static_cast<std::string>(FLAG_input_file);
}
DEFINE_string(ssrc_filter,
"",
"Comma-separated list of SSRCs in hexadecimal which are to be "
"used as input to the BWE (only applicable to pcap files).");
WEBRTC_DEFINE_string(
ssrc_filter,
"",
"Comma-separated list of SSRCs in hexadecimal which are to be "
"used as input to the BWE (only applicable to pcap files).");
std::set<uint32_t> SsrcFilter() {
std::string ssrc_filter_string = static_cast<std::string>(FLAG_ssrc_filter);
if (ssrc_filter_string.empty())
@ -64,7 +66,7 @@ std::set<uint32_t> SsrcFilter() {
return ssrcs;
}
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
} // namespace flags
bool ParseArgsAndSetupEstimator(int argc,

View File

@ -36,7 +36,7 @@ union FlagValue {
// bool values ('bool b = "false";' results in b == true!), we pass
// and int argument to New_BOOL as this appears to be safer - sigh.
// In particular, it prevents the (not uncommon!) bug where a bool
// flag is defined via: DEFINE_bool(flag, "false", "some comment");.
// flag is defined via: WEBRTC_DEFINE_bool(flag, "false", "some comment");.
static FlagValue New_BOOL(int b) {
FlagValue v;
v.b = (b != 0);
@ -155,7 +155,7 @@ class Flag {
};
// Internal use only.
#define DEFINE_FLAG(type, c_type, name, default, comment) \
#define WEBRTC_DEFINE_FLAG(type, c_type, name, default, comment) \
/* define and initialize the flag */ \
c_type FLAG_##name = (default); \
/* register the flag */ \
@ -164,25 +164,25 @@ class Flag {
rtc::FlagValue::New_##type(default))
// Internal use only.
#define DECLARE_FLAG(c_type, name) \
/* declare the external flag */ \
#define WEBRTC_DECLARE_FLAG(c_type, name) \
/* declare the external flag */ \
extern c_type FLAG_##name
// Use the following macros to define a new flag:
#define DEFINE_bool(name, default, comment) \
DEFINE_FLAG(BOOL, bool, name, default, comment)
#define DEFINE_int(name, default, comment) \
DEFINE_FLAG(INT, int, name, default, comment)
#define DEFINE_float(name, default, comment) \
DEFINE_FLAG(FLOAT, double, name, default, comment)
#define DEFINE_string(name, default, comment) \
DEFINE_FLAG(STRING, const char*, name, default, comment)
#define WEBRTC_DEFINE_bool(name, default, comment) \
WEBRTC_DEFINE_FLAG(BOOL, bool, name, default, comment)
#define WEBRTC_DEFINE_int(name, default, comment) \
WEBRTC_DEFINE_FLAG(INT, int, name, default, comment)
#define WEBRTC_DEFINE_float(name, default, comment) \
WEBRTC_DEFINE_FLAG(FLOAT, double, name, default, comment)
#define WEBRTC_DEFINE_string(name, default, comment) \
WEBRTC_DEFINE_FLAG(STRING, const char*, name, default, comment)
// Use the following macros to declare a flag defined elsewhere:
#define DECLARE_bool(name) DECLARE_FLAG(bool, name)
#define DECLARE_int(name) DECLARE_FLAG(int, name)
#define DECLARE_float(name) DECLARE_FLAG(double, name)
#define DECLARE_string(name) DECLARE_FLAG(const char*, name)
#define WEBRTC_DECLARE_bool(name) WEBRTC_DECLARE_FLAG(bool, name)
#define WEBRTC_DECLARE_int(name) WEBRTC_DECLARE_FLAG(int, name)
#define WEBRTC_DECLARE_float(name) WEBRTC_DECLARE_FLAG(double, name)
#define WEBRTC_DECLARE_string(name) WEBRTC_DECLARE_FLAG(const char*, name)
// The global list of all flags.
class FlagList {

View File

@ -31,19 +31,20 @@
#include "test/ios/test_support.h"
#endif
DEFINE_bool(help, false, "prints this message");
DEFINE_string(log, "", "logging options to use");
DEFINE_string(
WEBRTC_DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_string(log, "", "logging options to use");
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature.");
#if defined(WEBRTC_WIN)
DEFINE_int(crt_break_alloc, -1, "memory allocation to break on");
DEFINE_bool(default_error_handlers,
false,
"leave the default exception/dbg handler functions in place");
WEBRTC_DEFINE_int(crt_break_alloc, -1, "memory allocation to break on");
WEBRTC_DEFINE_bool(
default_error_handlers,
false,
"leave the default exception/dbg handler functions in place");
void TestInvalidParameterHandler(const wchar_t* expression,
const wchar_t* function,

View File

@ -30,32 +30,34 @@
static const int kAgcAnalWindowSamples = 100;
static const float kDefaultActivityThreshold = 0.3f;
DEFINE_bool(standalone_vad, true, "enable stand-alone VAD");
DEFINE_string(true_vad,
"",
"name of a file containing true VAD in 'int'"
" format");
DEFINE_string(video_vad,
"",
"name of a file containing video VAD (activity"
" probabilities) in double format. One activity per 10ms is"
" required. If no file is given the video information is not"
" incorporated. Negative activity is interpreted as video is"
" not adapted and the statistics are not computed during"
" the learning phase. Note that the negative video activities"
" are ONLY allowed at the beginning.");
DEFINE_string(result,
"",
"name of a file to write the results. The results"
" will be appended to the end of the file. This is optional.");
DEFINE_string(audio_content,
"",
"name of a file where audio content is written"
" to, in double format.");
DEFINE_float(activity_threshold,
kDefaultActivityThreshold,
"Activity threshold");
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_bool(standalone_vad, true, "enable stand-alone VAD");
WEBRTC_DEFINE_string(true_vad,
"",
"name of a file containing true VAD in 'int'"
" format");
WEBRTC_DEFINE_string(
video_vad,
"",
"name of a file containing video VAD (activity"
" probabilities) in double format. One activity per 10ms is"
" required. If no file is given the video information is not"
" incorporated. Negative activity is interpreted as video is"
" not adapted and the statistics are not computed during"
" the learning phase. Note that the negative video activities"
" are ONLY allowed at the beginning.");
WEBRTC_DEFINE_string(
result,
"",
"name of a file to write the results. The results"
" will be appended to the end of the file. This is optional.");
WEBRTC_DEFINE_string(audio_content,
"",
"name of a file where audio content is written"
" to, in double format.");
WEBRTC_DEFINE_float(activity_threshold,
kDefaultActivityThreshold,
"Activity threshold");
WEBRTC_DEFINE_bool(help, false, "prints this message");
namespace webrtc {

View File

@ -20,151 +20,173 @@
#include "test/field_trial.h"
#include "test/testsupport/fileutils.h"
DEFINE_string(plot_profile,
"default",
"A profile that selects a certain subset of the plots. Currently "
"defined profiles are \"all\", \"none\", \"sendside_bwe\","
"\"receiveside_bwe\" and \"default\"");
WEBRTC_DEFINE_string(
plot_profile,
"default",
"A profile that selects a certain subset of the plots. Currently "
"defined profiles are \"all\", \"none\", \"sendside_bwe\","
"\"receiveside_bwe\" and \"default\"");
DEFINE_bool(plot_incoming_packet_sizes,
false,
"Plot bar graph showing the size of each incoming packet.");
DEFINE_bool(plot_outgoing_packet_sizes,
false,
"Plot bar graph showing the size of each outgoing packet.");
DEFINE_bool(plot_incoming_packet_count,
false,
"Plot the accumulated number of packets for each incoming stream.");
DEFINE_bool(plot_outgoing_packet_count,
false,
"Plot the accumulated number of packets for each outgoing stream.");
DEFINE_bool(plot_audio_playout,
false,
"Plot bar graph showing the time between each audio playout.");
DEFINE_bool(plot_audio_level,
false,
"Plot line graph showing the audio level of incoming audio.");
DEFINE_bool(plot_incoming_sequence_number_delta,
false,
"Plot the sequence number difference between consecutive incoming "
"packets.");
DEFINE_bool(
WEBRTC_DEFINE_bool(plot_incoming_packet_sizes,
false,
"Plot bar graph showing the size of each incoming packet.");
WEBRTC_DEFINE_bool(plot_outgoing_packet_sizes,
false,
"Plot bar graph showing the size of each outgoing packet.");
WEBRTC_DEFINE_bool(
plot_incoming_packet_count,
false,
"Plot the accumulated number of packets for each incoming stream.");
WEBRTC_DEFINE_bool(
plot_outgoing_packet_count,
false,
"Plot the accumulated number of packets for each outgoing stream.");
WEBRTC_DEFINE_bool(
plot_audio_playout,
false,
"Plot bar graph showing the time between each audio playout.");
WEBRTC_DEFINE_bool(
plot_audio_level,
false,
"Plot line graph showing the audio level of incoming audio.");
WEBRTC_DEFINE_bool(
plot_incoming_sequence_number_delta,
false,
"Plot the sequence number difference between consecutive incoming "
"packets.");
WEBRTC_DEFINE_bool(
plot_incoming_delay_delta,
false,
"Plot the difference in 1-way path delay between consecutive packets.");
DEFINE_bool(plot_incoming_delay,
true,
"Plot the 1-way path delay for incoming packets, normalized so "
"that the first packet has delay 0.");
DEFINE_bool(plot_incoming_loss_rate,
true,
"Compute the loss rate for incoming packets using a method that's "
"similar to the one used for RTCP SR and RR fraction lost. Note "
"that the loss rate can be negative if packets are duplicated or "
"reordered.");
DEFINE_bool(plot_incoming_bitrate,
true,
"Plot the total bitrate used by all incoming streams.");
DEFINE_bool(plot_outgoing_bitrate,
true,
"Plot the total bitrate used by all outgoing streams.");
DEFINE_bool(plot_incoming_stream_bitrate,
true,
"Plot the bitrate used by each incoming stream.");
DEFINE_bool(plot_outgoing_stream_bitrate,
true,
"Plot the bitrate used by each outgoing stream.");
DEFINE_bool(plot_simulated_receiveside_bwe,
false,
"Run the receive-side bandwidth estimator with the incoming rtp "
"packets and plot the resulting estimate.");
DEFINE_bool(plot_simulated_sendside_bwe,
false,
"Run the send-side bandwidth estimator with the outgoing rtp and "
"incoming rtcp and plot the resulting estimate.");
DEFINE_bool(plot_network_delay_feedback,
true,
"Compute network delay based on sent packets and the received "
"transport feedback.");
DEFINE_bool(plot_fraction_loss_feedback,
true,
"Plot packet loss in percent for outgoing packets (as perceived by "
"the send-side bandwidth estimator).");
DEFINE_bool(plot_pacer_delay,
false,
"Plot the time each sent packet has spent in the pacer (based on "
"the difference between the RTP timestamp and the send "
"timestamp).");
DEFINE_bool(plot_timestamps,
false,
"Plot the rtp timestamps of all rtp and rtcp packets over time.");
DEFINE_bool(plot_rtcp_details,
false,
"Plot the contents of all report blocks in all sender and receiver "
"reports. This includes fraction lost, cumulative number of lost "
"packets, extended highest sequence number and time since last "
"received SR.");
DEFINE_bool(plot_audio_encoder_bitrate_bps,
false,
"Plot the audio encoder target bitrate.");
DEFINE_bool(plot_audio_encoder_frame_length_ms,
false,
"Plot the audio encoder frame length.");
DEFINE_bool(
WEBRTC_DEFINE_bool(
plot_incoming_delay,
true,
"Plot the 1-way path delay for incoming packets, normalized so "
"that the first packet has delay 0.");
WEBRTC_DEFINE_bool(
plot_incoming_loss_rate,
true,
"Compute the loss rate for incoming packets using a method that's "
"similar to the one used for RTCP SR and RR fraction lost. Note "
"that the loss rate can be negative if packets are duplicated or "
"reordered.");
WEBRTC_DEFINE_bool(plot_incoming_bitrate,
true,
"Plot the total bitrate used by all incoming streams.");
WEBRTC_DEFINE_bool(plot_outgoing_bitrate,
true,
"Plot the total bitrate used by all outgoing streams.");
WEBRTC_DEFINE_bool(plot_incoming_stream_bitrate,
true,
"Plot the bitrate used by each incoming stream.");
WEBRTC_DEFINE_bool(plot_outgoing_stream_bitrate,
true,
"Plot the bitrate used by each outgoing stream.");
WEBRTC_DEFINE_bool(
plot_simulated_receiveside_bwe,
false,
"Run the receive-side bandwidth estimator with the incoming rtp "
"packets and plot the resulting estimate.");
WEBRTC_DEFINE_bool(
plot_simulated_sendside_bwe,
false,
"Run the send-side bandwidth estimator with the outgoing rtp and "
"incoming rtcp and plot the resulting estimate.");
WEBRTC_DEFINE_bool(
plot_network_delay_feedback,
true,
"Compute network delay based on sent packets and the received "
"transport feedback.");
WEBRTC_DEFINE_bool(
plot_fraction_loss_feedback,
true,
"Plot packet loss in percent for outgoing packets (as perceived by "
"the send-side bandwidth estimator).");
WEBRTC_DEFINE_bool(
plot_pacer_delay,
false,
"Plot the time each sent packet has spent in the pacer (based on "
"the difference between the RTP timestamp and the send "
"timestamp).");
WEBRTC_DEFINE_bool(
plot_timestamps,
false,
"Plot the rtp timestamps of all rtp and rtcp packets over time.");
WEBRTC_DEFINE_bool(
plot_rtcp_details,
false,
"Plot the contents of all report blocks in all sender and receiver "
"reports. This includes fraction lost, cumulative number of lost "
"packets, extended highest sequence number and time since last "
"received SR.");
WEBRTC_DEFINE_bool(plot_audio_encoder_bitrate_bps,
false,
"Plot the audio encoder target bitrate.");
WEBRTC_DEFINE_bool(plot_audio_encoder_frame_length_ms,
false,
"Plot the audio encoder frame length.");
WEBRTC_DEFINE_bool(
plot_audio_encoder_packet_loss,
false,
"Plot the uplink packet loss fraction which is sent to the audio encoder.");
DEFINE_bool(plot_audio_encoder_fec, false, "Plot the audio encoder FEC.");
DEFINE_bool(plot_audio_encoder_dtx, false, "Plot the audio encoder DTX.");
DEFINE_bool(plot_audio_encoder_num_channels,
false,
"Plot the audio encoder number of channels.");
DEFINE_bool(plot_neteq_stats, false, "Plot the NetEq statistics.");
DEFINE_bool(plot_ice_candidate_pair_config,
false,
"Plot the ICE candidate pair config events.");
DEFINE_bool(plot_ice_connectivity_check,
false,
"Plot the ICE candidate pair connectivity checks.");
WEBRTC_DEFINE_bool(plot_audio_encoder_fec,
false,
"Plot the audio encoder FEC.");
WEBRTC_DEFINE_bool(plot_audio_encoder_dtx,
false,
"Plot the audio encoder DTX.");
WEBRTC_DEFINE_bool(plot_audio_encoder_num_channels,
false,
"Plot the audio encoder number of channels.");
WEBRTC_DEFINE_bool(plot_neteq_stats, false, "Plot the NetEq statistics.");
WEBRTC_DEFINE_bool(plot_ice_candidate_pair_config,
false,
"Plot the ICE candidate pair config events.");
WEBRTC_DEFINE_bool(plot_ice_connectivity_check,
false,
"Plot the ICE candidate pair connectivity checks.");
DEFINE_string(
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/"
" will assign the group Enabled to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
DEFINE_string(wav_filename,
"",
"Path to wav file used for simulation of jitter buffer");
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_string(wav_filename,
"",
"Path to wav file used for simulation of jitter buffer");
WEBRTC_DEFINE_bool(help, false, "prints this message");
DEFINE_bool(show_detector_state,
false,
"Show the state of the delay based BWE detector on the total "
"bitrate graph");
WEBRTC_DEFINE_bool(
show_detector_state,
false,
"Show the state of the delay based BWE detector on the total "
"bitrate graph");
DEFINE_bool(show_alr_state,
false,
"Show the state ALR state on the total bitrate graph");
WEBRTC_DEFINE_bool(show_alr_state,
false,
"Show the state ALR state on the total bitrate graph");
DEFINE_bool(parse_unconfigured_header_extensions,
true,
"Attempt to parse unconfigured header extensions using the default "
"WebRTC mapping. This can give very misleading results if the "
"application negotiates a different mapping.");
WEBRTC_DEFINE_bool(
parse_unconfigured_header_extensions,
true,
"Attempt to parse unconfigured header extensions using the default "
"WebRTC mapping. This can give very misleading results if the "
"application negotiates a different mapping.");
DEFINE_bool(print_triage_alerts,
false,
"Print triage alerts, i.e. a list of potential problems.");
WEBRTC_DEFINE_bool(print_triage_alerts,
false,
"Print triage alerts, i.e. a list of potential problems.");
DEFINE_bool(normalize_time,
true,
"Normalize the log timestamps so that the call starts at time 0.");
WEBRTC_DEFINE_bool(
normalize_time,
true,
"Normalize the log timestamps so that the call starts at time 0.");
DEFINE_bool(protobuf_output,
false,
"Output charts as protobuf instead of python code.");
WEBRTC_DEFINE_bool(protobuf_output,
false,
"Output charts as protobuf instead of python code.");
void SetAllPlotFlags(bool setting);

View File

@ -29,27 +29,34 @@ RTC_PUSH_IGNORING_WUNDEF()
RTC_POP_IGNORING_WUNDEF()
// TODO(andrew): unpack more of the data.
DEFINE_string(input_file, "input", "The name of the input stream file.");
DEFINE_string(output_file,
"ref_out",
"The name of the reference output stream file.");
DEFINE_string(reverse_file,
"reverse",
"The name of the reverse input stream file.");
DEFINE_string(delay_file, "delay.int32", "The name of the delay file.");
DEFINE_string(drift_file, "drift.int32", "The name of the drift file.");
DEFINE_string(level_file, "level.int32", "The name of the level file.");
DEFINE_string(keypress_file, "keypress.bool", "The name of the keypress file.");
DEFINE_string(callorder_file,
"callorder",
"The name of the render/capture call order file.");
DEFINE_string(settings_file, "settings.txt", "The name of the settings file.");
DEFINE_bool(full, false, "Unpack the full set of files (normally not needed).");
DEFINE_bool(raw, false, "Write raw data instead of a WAV file.");
DEFINE_bool(text,
false,
"Write non-audio files as text files instead of binary files.");
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_string(input_file, "input", "The name of the input stream file.");
WEBRTC_DEFINE_string(output_file,
"ref_out",
"The name of the reference output stream file.");
WEBRTC_DEFINE_string(reverse_file,
"reverse",
"The name of the reverse input stream file.");
WEBRTC_DEFINE_string(delay_file, "delay.int32", "The name of the delay file.");
WEBRTC_DEFINE_string(drift_file, "drift.int32", "The name of the drift file.");
WEBRTC_DEFINE_string(level_file, "level.int32", "The name of the level file.");
WEBRTC_DEFINE_string(keypress_file,
"keypress.bool",
"The name of the keypress file.");
WEBRTC_DEFINE_string(callorder_file,
"callorder",
"The name of the render/capture call order file.");
WEBRTC_DEFINE_string(settings_file,
"settings.txt",
"The name of the settings file.");
WEBRTC_DEFINE_bool(full,
false,
"Unpack the full set of files (normally not needed).");
WEBRTC_DEFINE_bool(raw, false, "Write raw data instead of a WAV file.");
WEBRTC_DEFINE_bool(
text,
false,
"Write non-audio files as text files instead of binary files.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
#define PRINT_CONFIG(field_name) \
if (msg.has_##field_name()) { \

View File

@ -16,7 +16,7 @@
#include "rtc_base/flags.h"
#include "test/testsupport/fileutils.h"
DEFINE_bool(scenario_logs, false, "Save logs from scenario framework.");
WEBRTC_DEFINE_bool(scenario_logs, false, "Save logs from scenario framework.");
namespace webrtc {
namespace test {

View File

@ -32,13 +32,13 @@
#if defined(WEBRTC_IOS)
#include "test/ios/test_support.h"
DEFINE_string(NSTreatUnknownArgumentsAsOpen,
"",
"Intentionally ignored flag intended for iOS simulator.");
DEFINE_string(ApplePersistenceIgnoreState,
"",
"Intentionally ignored flag intended for iOS simulator.");
DEFINE_bool(
WEBRTC_DEFINE_string(NSTreatUnknownArgumentsAsOpen,
"",
"Intentionally ignored flag intended for iOS simulator.");
WEBRTC_DEFINE_string(ApplePersistenceIgnoreState,
"",
"Intentionally ignored flag intended for iOS simulator.");
WEBRTC_DEFINE_bool(
save_chartjson_result,
false,
"Store the perf results in Documents/perf_result.json in the format "
@ -48,12 +48,12 @@ DEFINE_bool(
#else
DEFINE_string(
WEBRTC_DEFINE_string(
isolated_script_test_output,
"",
"Path to output an empty JSON file which Chromium infra requires.");
DEFINE_string(
WEBRTC_DEFINE_string(
isolated_script_test_perf_output,
"",
"Path where the perf results should be stored in the JSON format described "
@ -63,16 +63,16 @@ DEFINE_string(
#endif
DEFINE_bool(logs, false, "print logs to stderr");
WEBRTC_DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_string(
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
" will assign the group Enable to field trial WebRTC-FooFeature.");
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
namespace webrtc {

View File

@ -24,9 +24,9 @@ const std::string& DefaultArtifactPath() {
}
} // namespace
DEFINE_string(test_artifacts_dir,
DefaultArtifactPath().c_str(),
"The output folder where test output should be saved.");
WEBRTC_DEFINE_string(test_artifacts_dir,
DefaultArtifactPath().c_str(),
"The output folder where test output should be saved.");
namespace webrtc {
namespace test {

View File

@ -21,7 +21,7 @@
#include "test/gtest.h"
#include "test/testsupport/fileutils.h"
DECLARE_string(test_artifacts_dir);
WEBRTC_DECLARE_string(test_artifacts_dir);
namespace webrtc {
namespace test {

View File

@ -22,21 +22,24 @@
namespace webrtc {
namespace flags {
DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
WEBRTC_DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
std::string RtcEventLogName() {
return static_cast<std::string>(FLAG_rtc_event_log_name);
}
DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
WEBRTC_DEFINE_string(rtp_dump_name,
"",
"Filename for dumped received RTP stream.");
std::string RtpDumpName() {
return static_cast<std::string>(FLAG_rtp_dump_name);
}
DEFINE_string(encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
WEBRTC_DEFINE_string(
encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
return static_cast<std::string>(FLAG_encoded_frame_path);
}

View File

@ -72,39 +72,39 @@ namespace flags {
// TODO(pbos): Multiple receivers.
// Flag for payload type.
DEFINE_int(media_payload_type,
test::CallTest::kPayloadTypeVP8,
"Media payload type");
WEBRTC_DEFINE_int(media_payload_type,
test::CallTest::kPayloadTypeVP8,
"Media payload type");
static int MediaPayloadType() {
return static_cast<int>(FLAG_media_payload_type);
}
// Flag for RED payload type.
DEFINE_int(red_payload_type,
test::CallTest::kRedPayloadType,
"RED payload type");
WEBRTC_DEFINE_int(red_payload_type,
test::CallTest::kRedPayloadType,
"RED payload type");
static int RedPayloadType() {
return static_cast<int>(FLAG_red_payload_type);
}
// Flag for ULPFEC payload type.
DEFINE_int(ulpfec_payload_type,
test::CallTest::kUlpfecPayloadType,
"ULPFEC payload type");
WEBRTC_DEFINE_int(ulpfec_payload_type,
test::CallTest::kUlpfecPayloadType,
"ULPFEC payload type");
static int UlpfecPayloadType() {
return static_cast<int>(FLAG_ulpfec_payload_type);
}
DEFINE_int(media_payload_type_rtx,
test::CallTest::kSendRtxPayloadType,
"Media over RTX payload type");
WEBRTC_DEFINE_int(media_payload_type_rtx,
test::CallTest::kSendRtxPayloadType,
"Media over RTX payload type");
static int MediaPayloadTypeRtx() {
return static_cast<int>(FLAG_media_payload_type_rtx);
}
DEFINE_int(red_payload_type_rtx,
test::CallTest::kRtxRedPayloadType,
"RED over RTX payload type");
WEBRTC_DEFINE_int(red_payload_type_rtx,
test::CallTest::kRtxRedPayloadType,
"RED over RTX payload type");
static int RedPayloadTypeRtx() {
return static_cast<int>(FLAG_red_payload_type_rtx);
}
@ -115,7 +115,7 @@ const std::string& DefaultSsrc() {
std::to_string(test::CallTest::kVideoSendSsrcs[0]);
return ssrc;
}
DEFINE_string(ssrc, DefaultSsrc().c_str(), "Incoming SSRC");
WEBRTC_DEFINE_string(ssrc, DefaultSsrc().c_str(), "Incoming SSRC");
static uint32_t Ssrc() {
return rtc::StringToNumber<uint32_t>(FLAG_ssrc).value();
}
@ -125,54 +125,56 @@ const std::string& DefaultSsrcRtx() {
std::to_string(test::CallTest::kSendRtxSsrcs[0]);
return ssrc_rtx;
}
DEFINE_string(ssrc_rtx, DefaultSsrcRtx().c_str(), "Incoming RTX SSRC");
WEBRTC_DEFINE_string(ssrc_rtx, DefaultSsrcRtx().c_str(), "Incoming RTX SSRC");
static uint32_t SsrcRtx() {
return rtc::StringToNumber<uint32_t>(FLAG_ssrc_rtx).value();
}
// Flag for abs-send-time id.
DEFINE_int(abs_send_time_id, -1, "RTP extension ID for abs-send-time");
WEBRTC_DEFINE_int(abs_send_time_id, -1, "RTP extension ID for abs-send-time");
static int AbsSendTimeId() {
return static_cast<int>(FLAG_abs_send_time_id);
}
// Flag for transmission-offset id.
DEFINE_int(transmission_offset_id,
-1,
"RTP extension ID for transmission-offset");
WEBRTC_DEFINE_int(transmission_offset_id,
-1,
"RTP extension ID for transmission-offset");
static int TransmissionOffsetId() {
return static_cast<int>(FLAG_transmission_offset_id);
}
// Flag for rtpdump input file.
DEFINE_string(input_file, "", "input file");
WEBRTC_DEFINE_string(input_file, "", "input file");
static std::string InputFile() {
return static_cast<std::string>(FLAG_input_file);
}
DEFINE_string(config_file, "", "config file");
WEBRTC_DEFINE_string(config_file, "", "config file");
static std::string ConfigFile() {
return static_cast<std::string>(FLAG_config_file);
}
// Flag for raw output files.
DEFINE_string(out_base, "", "Basename (excluding .jpg) for raw output");
WEBRTC_DEFINE_string(out_base, "", "Basename (excluding .jpg) for raw output");
static std::string OutBase() {
return static_cast<std::string>(FLAG_out_base);
}
DEFINE_string(decoder_bitstream_filename, "", "Decoder bitstream output file");
WEBRTC_DEFINE_string(decoder_bitstream_filename,
"",
"Decoder bitstream output file");
static std::string DecoderBitstreamFilename() {
return static_cast<std::string>(FLAG_decoder_bitstream_filename);
}
// Flag for video codec.
DEFINE_string(codec, "VP8", "Video codec");
WEBRTC_DEFINE_string(codec, "VP8", "Video codec");
static std::string Codec() {
return static_cast<std::string>(FLAG_codec);
}
DEFINE_bool(help, false, "Print this message.");
WEBRTC_DEFINE_bool(help, false, "Print this message.");
} // namespace flags
static const uint32_t kReceiverLocalSsrc = 0x123456;

View File

@ -22,73 +22,76 @@ namespace webrtc {
namespace flags {
// Flags common with video loopback, with different default values.
DEFINE_int(width, 1850, "Video width (crops source).");
WEBRTC_DEFINE_int(width, 1850, "Video width (crops source).");
size_t Width() {
return static_cast<size_t>(FLAG_width);
}
DEFINE_int(height, 1110, "Video height (crops source).");
WEBRTC_DEFINE_int(height, 1110, "Video height (crops source).");
size_t Height() {
return static_cast<size_t>(FLAG_height);
}
DEFINE_int(fps, 5, "Frames per second.");
WEBRTC_DEFINE_int(fps, 5, "Frames per second.");
int Fps() {
return static_cast<int>(FLAG_fps);
}
DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
WEBRTC_DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
int MinBitrateKbps() {
return static_cast<int>(FLAG_min_bitrate);
}
DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
WEBRTC_DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
int StartBitrateKbps() {
return static_cast<int>(FLAG_start_bitrate);
}
DEFINE_int(target_bitrate, 200, "Stream target bitrate in kbps.");
WEBRTC_DEFINE_int(target_bitrate, 200, "Stream target bitrate in kbps.");
int TargetBitrateKbps() {
return static_cast<int>(FLAG_target_bitrate);
}
DEFINE_int(max_bitrate, 1000, "Call and stream max bitrate in kbps.");
WEBRTC_DEFINE_int(max_bitrate, 1000, "Call and stream max bitrate in kbps.");
int MaxBitrateKbps() {
return static_cast<int>(FLAG_max_bitrate);
}
DEFINE_int(num_temporal_layers, 2, "Number of temporal layers to use.");
WEBRTC_DEFINE_int(num_temporal_layers, 2, "Number of temporal layers to use.");
int NumTemporalLayers() {
return static_cast<int>(FLAG_num_temporal_layers);
}
// Flags common with video loopback, with equal default values.
DEFINE_string(codec, "VP8", "Video codec to use.");
WEBRTC_DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAG_codec);
}
DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
WEBRTC_DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
std::string RtcEventLogName() {
return static_cast<std::string>(FLAG_rtc_event_log_name);
}
DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
WEBRTC_DEFINE_string(rtp_dump_name,
"",
"Filename for dumped received RTP stream.");
std::string RtpDumpName() {
return static_cast<std::string>(FLAG_rtp_dump_name);
}
DEFINE_int(selected_tl,
-1,
"Temporal layer to show or analyze. -1 to disable filtering.");
WEBRTC_DEFINE_int(
selected_tl,
-1,
"Temporal layer to show or analyze. -1 to disable filtering.");
int SelectedTL() {
return static_cast<int>(FLAG_selected_tl);
}
DEFINE_int(
WEBRTC_DEFINE_int(
duration,
0,
"Duration of the test in seconds. If 0, rendered will be shown instead.");
@ -96,71 +99,74 @@ int DurationSecs() {
return static_cast<int>(FLAG_duration);
}
DEFINE_string(output_filename, "", "Target graph data filename.");
WEBRTC_DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
return static_cast<std::string>(FLAG_output_filename);
}
DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
WEBRTC_DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
std::string GraphTitle() {
return static_cast<std::string>(FLAG_graph_title);
}
DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
WEBRTC_DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAG_loss_percent);
}
DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
WEBRTC_DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
return static_cast<int>(FLAG_link_capacity);
}
DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
WEBRTC_DEFINE_int(queue_size,
0,
"Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAG_queue_size);
}
DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
WEBRTC_DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAG_avg_propagation_delay_ms);
}
DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
WEBRTC_DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAG_std_propagation_delay_ms);
}
DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
WEBRTC_DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
int NumStreams() {
return static_cast<int>(FLAG_num_streams);
}
DEFINE_int(selected_stream,
0,
"ID of the stream to show or analyze. "
"Set to the number of streams to show them all.");
WEBRTC_DEFINE_int(selected_stream,
0,
"ID of the stream to show or analyze. "
"Set to the number of streams to show them all.");
int SelectedStream() {
return static_cast<int>(FLAG_selected_stream);
}
DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
WEBRTC_DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
int NumSpatialLayers() {
return static_cast<int>(FLAG_num_spatial_layers);
}
DEFINE_int(inter_layer_pred,
0,
"Inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
WEBRTC_DEFINE_int(
inter_layer_pred,
0,
"Inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode InterLayerPred() {
if (FLAG_inter_layer_pred == 0) {
return InterLayerPredMode::kOn;
@ -172,58 +178,65 @@ InterLayerPredMode InterLayerPred() {
}
}
DEFINE_int(selected_sl,
-1,
"Spatial layer to show or analyze. -1 to disable filtering.");
WEBRTC_DEFINE_int(selected_sl,
-1,
"Spatial layer to show or analyze. -1 to disable filtering.");
int SelectedSL() {
return static_cast<int>(FLAG_selected_sl);
}
DEFINE_string(stream0,
"",
"Comma separated values describing VideoStream for stream #0.");
WEBRTC_DEFINE_string(
stream0,
"",
"Comma separated values describing VideoStream for stream #0.");
std::string Stream0() {
return static_cast<std::string>(FLAG_stream0);
}
DEFINE_string(stream1,
"",
"Comma separated values describing VideoStream for stream #1.");
WEBRTC_DEFINE_string(
stream1,
"",
"Comma separated values describing VideoStream for stream #1.");
std::string Stream1() {
return static_cast<std::string>(FLAG_stream1);
}
DEFINE_string(sl0,
"",
"Comma separated values describing SpatialLayer for layer #0.");
WEBRTC_DEFINE_string(
sl0,
"",
"Comma separated values describing SpatialLayer for layer #0.");
std::string SL0() {
return static_cast<std::string>(FLAG_sl0);
}
DEFINE_string(sl1,
"",
"Comma separated values describing SpatialLayer for layer #1.");
WEBRTC_DEFINE_string(
sl1,
"",
"Comma separated values describing SpatialLayer for layer #1.");
std::string SL1() {
return static_cast<std::string>(FLAG_sl1);
}
DEFINE_string(encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
WEBRTC_DEFINE_string(
encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
return static_cast<std::string>(FLAG_encoded_frame_path);
}
DEFINE_bool(logs, false, "print logs to stderr");
WEBRTC_DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
WEBRTC_DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_bool(generic_descriptor, false, "Use the generic frame descriptor.");
WEBRTC_DEFINE_bool(generic_descriptor,
false,
"Use the generic frame descriptor.");
DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
WEBRTC_DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
DEFINE_string(
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
@ -232,12 +245,14 @@ DEFINE_string(
"trials are separated by \"/\"");
// Screenshare-specific flags.
DEFINE_int(min_transmit_bitrate, 400, "Min transmit bitrate incl. padding.");
WEBRTC_DEFINE_int(min_transmit_bitrate,
400,
"Min transmit bitrate incl. padding.");
int MinTransmitBitrateKbps() {
return FLAG_min_transmit_bitrate;
}
DEFINE_bool(
WEBRTC_DEFINE_bool(
generate_slides,
false,
"Whether to use randomly generated slides or read them from files.");
@ -245,14 +260,14 @@ bool GenerateSlides() {
return static_cast<int>(FLAG_generate_slides);
}
DEFINE_int(slide_change_interval,
10,
"Interval (in seconds) between simulated slide changes.");
WEBRTC_DEFINE_int(slide_change_interval,
10,
"Interval (in seconds) between simulated slide changes.");
int SlideChangeInterval() {
return static_cast<int>(FLAG_slide_change_interval);
}
DEFINE_int(
WEBRTC_DEFINE_int(
scroll_duration,
0,
"Duration (in seconds) during which a slide will be scrolled into place.");
@ -260,9 +275,10 @@ int ScrollDuration() {
return static_cast<int>(FLAG_scroll_duration);
}
DEFINE_string(slides,
"",
"Comma-separated list of *.yuv files to display as slides.");
WEBRTC_DEFINE_string(
slides,
"",
"Comma-separated list of *.yuv files to display as slides.");
std::vector<std::string> Slides() {
std::vector<std::string> slides;
std::string slides_list = FLAG_slides;
@ -270,7 +286,7 @@ std::vector<std::string> Slides() {
return slides;
}
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_bool(help, false, "prints this message");
} // namespace flags

View File

@ -33,73 +33,79 @@ InterLayerPredMode IntToInterLayerPredMode(int inter_layer_pred) {
}
// Flags for video.
DEFINE_int(vwidth, 640, "Video width.");
WEBRTC_DEFINE_int(vwidth, 640, "Video width.");
size_t VideoWidth() {
return static_cast<size_t>(FLAG_vwidth);
}
DEFINE_int(vheight, 480, "Video height.");
WEBRTC_DEFINE_int(vheight, 480, "Video height.");
size_t VideoHeight() {
return static_cast<size_t>(FLAG_vheight);
}
DEFINE_int(vfps, 30, "Video frames per second.");
WEBRTC_DEFINE_int(vfps, 30, "Video frames per second.");
int VideoFps() {
return static_cast<int>(FLAG_vfps);
}
DEFINE_int(capture_device_index,
0,
"Capture device to select for video stream");
WEBRTC_DEFINE_int(capture_device_index,
0,
"Capture device to select for video stream");
size_t GetCaptureDevice() {
return static_cast<size_t>(FLAG_capture_device_index);
}
DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
WEBRTC_DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
int VideoTargetBitrateKbps() {
return static_cast<int>(FLAG_vtarget_bitrate);
}
DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
WEBRTC_DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
int VideoMinBitrateKbps() {
return static_cast<int>(FLAG_vmin_bitrate);
}
DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
WEBRTC_DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
int VideoMaxBitrateKbps() {
return static_cast<int>(FLAG_vmax_bitrate);
}
DEFINE_bool(suspend_below_min_bitrate,
false,
"Suspends video below the configured min bitrate.");
WEBRTC_DEFINE_bool(suspend_below_min_bitrate,
false,
"Suspends video below the configured min bitrate.");
DEFINE_int(vnum_temporal_layers,
1,
"Number of temporal layers for video. Set to 1-4 to override.");
WEBRTC_DEFINE_int(
vnum_temporal_layers,
1,
"Number of temporal layers for video. Set to 1-4 to override.");
int VideoNumTemporalLayers() {
return static_cast<int>(FLAG_vnum_temporal_layers);
}
DEFINE_int(vnum_streams, 0, "Number of video streams to show or analyze.");
WEBRTC_DEFINE_int(vnum_streams,
0,
"Number of video streams to show or analyze.");
int VideoNumStreams() {
return static_cast<int>(FLAG_vnum_streams);
}
DEFINE_int(vnum_spatial_layers, 1, "Number of video spatial layers to use.");
WEBRTC_DEFINE_int(vnum_spatial_layers,
1,
"Number of video spatial layers to use.");
int VideoNumSpatialLayers() {
return static_cast<int>(FLAG_vnum_spatial_layers);
}
DEFINE_int(vinter_layer_pred,
2,
"Video inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
WEBRTC_DEFINE_int(
vinter_layer_pred,
2,
"Video inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode VideoInterLayerPred() {
return IntToInterLayerPredMode(FLAG_vinter_layer_pred);
}
DEFINE_string(
WEBRTC_DEFINE_string(
vstream0,
"",
"Comma separated values describing VideoStream for video stream #0.");
@ -107,7 +113,7 @@ std::string VideoStream0() {
return static_cast<std::string>(FLAG_vstream0);
}
DEFINE_string(
WEBRTC_DEFINE_string(
vstream1,
"",
"Comma separated values describing VideoStream for video stream #1.");
@ -115,7 +121,7 @@ std::string VideoStream1() {
return static_cast<std::string>(FLAG_vstream1);
}
DEFINE_string(
WEBRTC_DEFINE_string(
vsl0,
"",
"Comma separated values describing SpatialLayer for video layer #0.");
@ -123,7 +129,7 @@ std::string VideoSL0() {
return static_cast<std::string>(FLAG_vsl0);
}
DEFINE_string(
WEBRTC_DEFINE_string(
vsl1,
"",
"Comma separated values describing SpatialLayer for video layer #1.");
@ -131,98 +137,105 @@ std::string VideoSL1() {
return static_cast<std::string>(FLAG_vsl1);
}
DEFINE_int(vselected_tl,
-1,
"Temporal layer to show or analyze for screenshare. -1 to disable "
"filtering.");
WEBRTC_DEFINE_int(
vselected_tl,
-1,
"Temporal layer to show or analyze for screenshare. -1 to disable "
"filtering.");
int VideoSelectedTL() {
return static_cast<int>(FLAG_vselected_tl);
}
DEFINE_int(vselected_stream,
0,
"ID of the stream to show or analyze for screenshare."
"Set to the number of streams to show them all.");
WEBRTC_DEFINE_int(vselected_stream,
0,
"ID of the stream to show or analyze for screenshare."
"Set to the number of streams to show them all.");
int VideoSelectedStream() {
return static_cast<int>(FLAG_vselected_stream);
}
DEFINE_int(vselected_sl,
-1,
"Spatial layer to show or analyze for screenshare. -1 to disable "
"filtering.");
WEBRTC_DEFINE_int(
vselected_sl,
-1,
"Spatial layer to show or analyze for screenshare. -1 to disable "
"filtering.");
int VideoSelectedSL() {
return static_cast<int>(FLAG_vselected_sl);
}
// Flags for screenshare.
DEFINE_int(min_transmit_bitrate,
400,
"Min transmit bitrate incl. padding for screenshare.");
WEBRTC_DEFINE_int(min_transmit_bitrate,
400,
"Min transmit bitrate incl. padding for screenshare.");
int ScreenshareMinTransmitBitrateKbps() {
return FLAG_min_transmit_bitrate;
}
DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
WEBRTC_DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
size_t ScreenshareWidth() {
return static_cast<size_t>(FLAG_swidth);
}
DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
WEBRTC_DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
size_t ScreenshareHeight() {
return static_cast<size_t>(FLAG_sheight);
}
DEFINE_int(sfps, 5, "Frames per second for screenshare.");
WEBRTC_DEFINE_int(sfps, 5, "Frames per second for screenshare.");
int ScreenshareFps() {
return static_cast<int>(FLAG_sfps);
}
DEFINE_int(starget_bitrate, 100, "Screenshare stream target bitrate in kbps.");
WEBRTC_DEFINE_int(starget_bitrate,
100,
"Screenshare stream target bitrate in kbps.");
int ScreenshareTargetBitrateKbps() {
return static_cast<int>(FLAG_starget_bitrate);
}
DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
WEBRTC_DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
int ScreenshareMinBitrateKbps() {
return static_cast<int>(FLAG_smin_bitrate);
}
DEFINE_int(smax_bitrate, 2000, "Screenshare stream max bitrate in kbps.");
WEBRTC_DEFINE_int(smax_bitrate,
2000,
"Screenshare stream max bitrate in kbps.");
int ScreenshareMaxBitrateKbps() {
return static_cast<int>(FLAG_smax_bitrate);
}
DEFINE_int(snum_temporal_layers,
2,
"Number of temporal layers to use in screenshare.");
WEBRTC_DEFINE_int(snum_temporal_layers,
2,
"Number of temporal layers to use in screenshare.");
int ScreenshareNumTemporalLayers() {
return static_cast<int>(FLAG_snum_temporal_layers);
}
DEFINE_int(snum_streams,
0,
"Number of screenshare streams to show or analyze.");
WEBRTC_DEFINE_int(snum_streams,
0,
"Number of screenshare streams to show or analyze.");
int ScreenshareNumStreams() {
return static_cast<int>(FLAG_snum_streams);
}
DEFINE_int(snum_spatial_layers,
1,
"Number of screenshare spatial layers to use.");
WEBRTC_DEFINE_int(snum_spatial_layers,
1,
"Number of screenshare spatial layers to use.");
int ScreenshareNumSpatialLayers() {
return static_cast<int>(FLAG_snum_spatial_layers);
}
DEFINE_int(sinter_layer_pred,
0,
"Screenshare inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
WEBRTC_DEFINE_int(
sinter_layer_pred,
0,
"Screenshare inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode ScreenshareInterLayerPred() {
return IntToInterLayerPredMode(FLAG_sinter_layer_pred);
}
DEFINE_string(
WEBRTC_DEFINE_string(
sstream0,
"",
"Comma separated values describing VideoStream for screenshare stream #0.");
@ -230,7 +243,7 @@ std::string ScreenshareStream0() {
return static_cast<std::string>(FLAG_sstream0);
}
DEFINE_string(
WEBRTC_DEFINE_string(
sstream1,
"",
"Comma separated values describing VideoStream for screenshare stream #1.");
@ -238,7 +251,7 @@ std::string ScreenshareStream1() {
return static_cast<std::string>(FLAG_sstream1);
}
DEFINE_string(
WEBRTC_DEFINE_string(
ssl0,
"",
"Comma separated values describing SpatialLayer for screenshare layer #0.");
@ -246,7 +259,7 @@ std::string ScreenshareSL0() {
return static_cast<std::string>(FLAG_ssl0);
}
DEFINE_string(
WEBRTC_DEFINE_string(
ssl1,
"",
"Comma separated values describing SpatialLayer for screenshare layer #1.");
@ -254,31 +267,33 @@ std::string ScreenshareSL1() {
return static_cast<std::string>(FLAG_ssl1);
}
DEFINE_int(sselected_tl,
-1,
"Temporal layer to show or analyze for screenshare. -1 to disable "
"filtering.");
WEBRTC_DEFINE_int(
sselected_tl,
-1,
"Temporal layer to show or analyze for screenshare. -1 to disable "
"filtering.");
int ScreenshareSelectedTL() {
return static_cast<int>(FLAG_sselected_tl);
}
DEFINE_int(sselected_stream,
0,
"ID of the stream to show or analyze for screenshare."
"Set to the number of streams to show them all.");
WEBRTC_DEFINE_int(sselected_stream,
0,
"ID of the stream to show or analyze for screenshare."
"Set to the number of streams to show them all.");
int ScreenshareSelectedStream() {
return static_cast<int>(FLAG_sselected_stream);
}
DEFINE_int(sselected_sl,
-1,
"Spatial layer to show or analyze for screenshare. -1 to disable "
"filtering.");
WEBRTC_DEFINE_int(
sselected_sl,
-1,
"Spatial layer to show or analyze for screenshare. -1 to disable "
"filtering.");
int ScreenshareSelectedSL() {
return static_cast<int>(FLAG_sselected_sl);
}
DEFINE_bool(
WEBRTC_DEFINE_bool(
generate_slides,
false,
"Whether to use randomly generated slides or read them from files.");
@ -286,14 +301,14 @@ bool GenerateSlides() {
return static_cast<int>(FLAG_generate_slides);
}
DEFINE_int(slide_change_interval,
10,
"Interval (in seconds) between simulated slide changes.");
WEBRTC_DEFINE_int(slide_change_interval,
10,
"Interval (in seconds) between simulated slide changes.");
int SlideChangeInterval() {
return static_cast<int>(FLAG_slide_change_interval);
}
DEFINE_int(
WEBRTC_DEFINE_int(
scroll_duration,
0,
"Duration (in seconds) during which a slide will be scrolled into place.");
@ -301,9 +316,10 @@ int ScrollDuration() {
return static_cast<int>(FLAG_scroll_duration);
}
DEFINE_string(slides,
"",
"Comma-separated list of *.yuv files to display as slides.");
WEBRTC_DEFINE_string(
slides,
"",
"Comma-separated list of *.yuv files to display as slides.");
std::vector<std::string> Slides() {
std::vector<std::string> slides;
std::string slides_list = FLAG_slides;
@ -312,31 +328,31 @@ std::vector<std::string> Slides() {
}
// Flags common with screenshare and video loopback, with equal default values.
DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
WEBRTC_DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
int StartBitrateKbps() {
return static_cast<int>(FLAG_start_bitrate);
}
DEFINE_string(codec, "VP8", "Video codec to use.");
WEBRTC_DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAG_codec);
}
DEFINE_bool(analyze_video,
false,
"Analyze video stream (if --duration is present)");
WEBRTC_DEFINE_bool(analyze_video,
false,
"Analyze video stream (if --duration is present)");
bool AnalyzeVideo() {
return static_cast<bool>(FLAG_analyze_video);
}
DEFINE_bool(analyze_screenshare,
false,
"Analyze screenshare stream (if --duration is present)");
WEBRTC_DEFINE_bool(analyze_screenshare,
false,
"Analyze screenshare stream (if --duration is present)");
bool AnalyzeScreenshare() {
return static_cast<bool>(FLAG_analyze_screenshare);
}
DEFINE_int(
WEBRTC_DEFINE_int(
duration,
0,
"Duration of the test in seconds. If 0, rendered will be shown instead.");
@ -344,100 +360,113 @@ int DurationSecs() {
return static_cast<int>(FLAG_duration);
}
DEFINE_string(output_filename, "", "Target graph data filename.");
WEBRTC_DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
return static_cast<std::string>(FLAG_output_filename);
}
DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
WEBRTC_DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
std::string GraphTitle() {
return static_cast<std::string>(FLAG_graph_title);
}
DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
WEBRTC_DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAG_loss_percent);
}
DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
WEBRTC_DEFINE_int(avg_burst_loss_length,
-1,
"Average burst length of lost packets.");
int AvgBurstLossLength() {
return static_cast<int>(FLAG_avg_burst_loss_length);
}
DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
WEBRTC_DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
return static_cast<int>(FLAG_link_capacity);
}
DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
WEBRTC_DEFINE_int(queue_size,
0,
"Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAG_queue_size);
}
DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
WEBRTC_DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAG_avg_propagation_delay_ms);
}
DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created. "
"Works only when --duration is set.");
WEBRTC_DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created. "
"Works only when --duration is set.");
std::string RtcEventLogName() {
return static_cast<std::string>(FLAG_rtc_event_log_name);
}
DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
WEBRTC_DEFINE_string(rtp_dump_name,
"",
"Filename for dumped received RTP stream.");
std::string RtpDumpName() {
return static_cast<std::string>(FLAG_rtp_dump_name);
}
DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
WEBRTC_DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAG_std_propagation_delay_ms);
}
DEFINE_string(encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
WEBRTC_DEFINE_string(
encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
return static_cast<std::string>(FLAG_encoded_frame_path);
}
DEFINE_bool(logs, false, "print logs to stderr");
WEBRTC_DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
WEBRTC_DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_bool(generic_descriptor, false, "Use the generic frame descriptor.");
WEBRTC_DEFINE_bool(generic_descriptor,
false,
"Use the generic frame descriptor.");
DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
WEBRTC_DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");
WEBRTC_DEFINE_bool(use_ulpfec,
false,
"Use RED+ULPFEC forward error correction.");
DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");
WEBRTC_DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");
DEFINE_bool(audio, false, "Add audio stream");
WEBRTC_DEFINE_bool(audio, false, "Add audio stream");
DEFINE_bool(audio_video_sync,
false,
"Sync audio and video stream (no effect if"
" audio is false)");
WEBRTC_DEFINE_bool(audio_video_sync,
false,
"Sync audio and video stream (no effect if"
" audio is false)");
DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");
WEBRTC_DEFINE_bool(audio_dtx,
false,
"Enable audio DTX (no effect if audio is false)");
DEFINE_bool(video, true, "Add video stream");
WEBRTC_DEFINE_bool(video, true, "Add video stream");
DEFINE_string(
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
@ -446,15 +475,16 @@ DEFINE_string(
"trials are separated by \"/\"");
// Video-specific flags.
DEFINE_string(vclip,
"",
"Name of the clip to show. If empty, the camera is used. Use "
"\"Generator\" for chroma generator.");
WEBRTC_DEFINE_string(
vclip,
"",
"Name of the clip to show. If empty, the camera is used. Use "
"\"Generator\" for chroma generator.");
std::string VideoClip() {
return static_cast<std::string>(FLAG_vclip);
}
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_bool(help, false, "prints this message");
} // namespace flags

View File

@ -25,10 +25,11 @@
#include "test/testsupport/perf_test.h"
#include "test/testsupport/test_artifacts.h"
DEFINE_bool(save_worst_frame,
false,
"Enable saving a frame with the lowest PSNR to a jpeg file in the "
"test_artifacts_dir");
WEBRTC_DEFINE_bool(
save_worst_frame,
false,
"Enable saving a frame with the lowest PSNR to a jpeg file in the "
"test_artifacts_dir");
namespace webrtc {
namespace {

View File

@ -22,61 +22,62 @@ namespace webrtc {
namespace flags {
// Flags common with screenshare loopback, with different default values.
DEFINE_int(width, 640, "Video width.");
WEBRTC_DEFINE_int(width, 640, "Video width.");
size_t Width() {
return static_cast<size_t>(FLAG_width);
}
DEFINE_int(height, 480, "Video height.");
WEBRTC_DEFINE_int(height, 480, "Video height.");
size_t Height() {
return static_cast<size_t>(FLAG_height);
}
DEFINE_int(fps, 30, "Frames per second.");
WEBRTC_DEFINE_int(fps, 30, "Frames per second.");
int Fps() {
return static_cast<int>(FLAG_fps);
}
DEFINE_int(capture_device_index, 0, "Capture device to select");
WEBRTC_DEFINE_int(capture_device_index, 0, "Capture device to select");
size_t GetCaptureDevice() {
return static_cast<size_t>(FLAG_capture_device_index);
}
DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
WEBRTC_DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
int MinBitrateKbps() {
return static_cast<int>(FLAG_min_bitrate);
}
DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
WEBRTC_DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
int StartBitrateKbps() {
return static_cast<int>(FLAG_start_bitrate);
}
DEFINE_int(target_bitrate, 800, "Stream target bitrate in kbps.");
WEBRTC_DEFINE_int(target_bitrate, 800, "Stream target bitrate in kbps.");
int TargetBitrateKbps() {
return static_cast<int>(FLAG_target_bitrate);
}
DEFINE_int(max_bitrate, 800, "Call and stream max bitrate in kbps.");
WEBRTC_DEFINE_int(max_bitrate, 800, "Call and stream max bitrate in kbps.");
int MaxBitrateKbps() {
return static_cast<int>(FLAG_max_bitrate);
}
DEFINE_bool(suspend_below_min_bitrate,
false,
"Suspends video below the configured min bitrate.");
WEBRTC_DEFINE_bool(suspend_below_min_bitrate,
false,
"Suspends video below the configured min bitrate.");
DEFINE_int(num_temporal_layers,
1,
"Number of temporal layers. Set to 1-4 to override.");
WEBRTC_DEFINE_int(num_temporal_layers,
1,
"Number of temporal layers. Set to 1-4 to override.");
int NumTemporalLayers() {
return static_cast<int>(FLAG_num_temporal_layers);
}
DEFINE_int(inter_layer_pred,
2,
"Inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
WEBRTC_DEFINE_int(
inter_layer_pred,
2,
"Inter-layer prediction mode. "
"0 - enabled, 1 - disabled, 2 - enabled only for key pictures.");
InterLayerPredMode InterLayerPred() {
if (FLAG_inter_layer_pred == 0) {
return InterLayerPredMode::kOn;
@ -89,19 +90,20 @@ InterLayerPredMode InterLayerPred() {
}
// Flags common with screenshare loopback, with equal default values.
DEFINE_string(codec, "VP8", "Video codec to use.");
WEBRTC_DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
return static_cast<std::string>(FLAG_codec);
}
DEFINE_int(selected_tl,
-1,
"Temporal layer to show or analyze. -1 to disable filtering.");
WEBRTC_DEFINE_int(
selected_tl,
-1,
"Temporal layer to show or analyze. -1 to disable filtering.");
int SelectedTL() {
return static_cast<int>(FLAG_selected_tl);
}
DEFINE_int(
WEBRTC_DEFINE_int(
duration,
0,
"Duration of the test in seconds. If 0, rendered will be shown instead.");
@ -109,156 +111,174 @@ int DurationSecs() {
return static_cast<int>(FLAG_duration);
}
DEFINE_string(output_filename, "", "Target graph data filename.");
WEBRTC_DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
return static_cast<std::string>(FLAG_output_filename);
}
DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
WEBRTC_DEFINE_string(graph_title,
"",
"If empty, title will be generated automatically.");
std::string GraphTitle() {
return static_cast<std::string>(FLAG_graph_title);
}
DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
WEBRTC_DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
return static_cast<int>(FLAG_loss_percent);
}
DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
WEBRTC_DEFINE_int(avg_burst_loss_length,
-1,
"Average burst length of lost packets.");
int AvgBurstLossLength() {
return static_cast<int>(FLAG_avg_burst_loss_length);
}
DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
WEBRTC_DEFINE_int(link_capacity,
0,
"Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
return static_cast<int>(FLAG_link_capacity);
}
DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
WEBRTC_DEFINE_int(queue_size,
0,
"Size of the bottleneck link queue in packets.");
int QueueSize() {
return static_cast<int>(FLAG_queue_size);
}
DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
WEBRTC_DEFINE_int(avg_propagation_delay_ms,
0,
"Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
return static_cast<int>(FLAG_avg_propagation_delay_ms);
}
DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
WEBRTC_DEFINE_string(rtc_event_log_name,
"",
"Filename for rtc event log. Two files "
"with \"_send\" and \"_recv\" suffixes will be created.");
std::string RtcEventLogName() {
return static_cast<std::string>(FLAG_rtc_event_log_name);
}
DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
WEBRTC_DEFINE_string(rtp_dump_name,
"",
"Filename for dumped received RTP stream.");
std::string RtpDumpName() {
return static_cast<std::string>(FLAG_rtp_dump_name);
}
DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
WEBRTC_DEFINE_int(std_propagation_delay_ms,
0,
"Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
return static_cast<int>(FLAG_std_propagation_delay_ms);
}
DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
WEBRTC_DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
int NumStreams() {
return static_cast<int>(FLAG_num_streams);
}
DEFINE_int(selected_stream,
0,
"ID of the stream to show or analyze. "
"Set to the number of streams to show them all.");
WEBRTC_DEFINE_int(selected_stream,
0,
"ID of the stream to show or analyze. "
"Set to the number of streams to show them all.");
int SelectedStream() {
return static_cast<int>(FLAG_selected_stream);
}
DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
WEBRTC_DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
int NumSpatialLayers() {
return static_cast<int>(FLAG_num_spatial_layers);
}
DEFINE_int(selected_sl,
-1,
"Spatial layer to show or analyze. -1 to disable filtering.");
WEBRTC_DEFINE_int(selected_sl,
-1,
"Spatial layer to show or analyze. -1 to disable filtering.");
int SelectedSL() {
return static_cast<int>(FLAG_selected_sl);
}
DEFINE_string(stream0,
"",
"Comma separated values describing VideoStream for stream #0.");
WEBRTC_DEFINE_string(
stream0,
"",
"Comma separated values describing VideoStream for stream #0.");
std::string Stream0() {
return static_cast<std::string>(FLAG_stream0);
}
DEFINE_string(stream1,
"",
"Comma separated values describing VideoStream for stream #1.");
WEBRTC_DEFINE_string(
stream1,
"",
"Comma separated values describing VideoStream for stream #1.");
std::string Stream1() {
return static_cast<std::string>(FLAG_stream1);
}
DEFINE_string(sl0,
"",
"Comma separated values describing SpatialLayer for layer #0.");
WEBRTC_DEFINE_string(
sl0,
"",
"Comma separated values describing SpatialLayer for layer #0.");
std::string SL0() {
return static_cast<std::string>(FLAG_sl0);
}
DEFINE_string(sl1,
"",
"Comma separated values describing SpatialLayer for layer #1.");
WEBRTC_DEFINE_string(
sl1,
"",
"Comma separated values describing SpatialLayer for layer #1.");
std::string SL1() {
return static_cast<std::string>(FLAG_sl1);
}
DEFINE_string(encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
WEBRTC_DEFINE_string(
encoded_frame_path,
"",
"The base path for encoded frame logs. Created files will have "
"the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
return static_cast<std::string>(FLAG_encoded_frame_path);
}
DEFINE_bool(logs, false, "print logs to stderr");
WEBRTC_DEFINE_bool(logs, false, "print logs to stderr");
DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
WEBRTC_DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_bool(generic_descriptor, false, "Use the generic frame descriptor.");
WEBRTC_DEFINE_bool(generic_descriptor,
false,
"Use the generic frame descriptor.");
DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
WEBRTC_DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");
WEBRTC_DEFINE_bool(use_ulpfec,
false,
"Use RED+ULPFEC forward error correction.");
DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");
WEBRTC_DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");
DEFINE_bool(audio, false, "Add audio stream");
WEBRTC_DEFINE_bool(audio, false, "Add audio stream");
DEFINE_bool(use_real_adm,
false,
"Use real ADM instead of fake (no effect if audio is false)");
WEBRTC_DEFINE_bool(
use_real_adm,
false,
"Use real ADM instead of fake (no effect if audio is false)");
DEFINE_bool(audio_video_sync,
false,
"Sync audio and video stream (no effect if"
" audio is false)");
WEBRTC_DEFINE_bool(audio_video_sync,
false,
"Sync audio and video stream (no effect if"
" audio is false)");
DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");
WEBRTC_DEFINE_bool(audio_dtx,
false,
"Enable audio DTX (no effect if audio is false)");
DEFINE_bool(video, true, "Add video stream");
WEBRTC_DEFINE_bool(video, true, "Add video stream");
DEFINE_string(
WEBRTC_DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
@ -267,14 +287,15 @@ DEFINE_string(
"trials are separated by \"/\"");
// Video-specific flags.
DEFINE_string(clip,
"",
"Name of the clip to show. If empty, using chroma generator.");
WEBRTC_DEFINE_string(
clip,
"",
"Name of the clip to show. If empty, using chroma generator.");
std::string Clip() {
return static_cast<std::string>(FLAG_clip);
}
DEFINE_bool(help, false, "prints this message");
WEBRTC_DEFINE_bool(help, false, "prints this message");
} // namespace flags