Use backticks not vertical bars to denote variables in comments for /media

Bug: webrtc:12338
Change-Id: Ia800a4017ede1f647b36f809ef3c5b37a2616fdd
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/226949
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34567}
Artem Titov 2021-07-26 13:24:57 +02:00 committed by WebRTC LUCI CQ
parent 6a9838a3ff
commit 37f664f6d5
32 changed files with 156 additions and 156 deletions


@ -38,7 +38,7 @@ class RTC_EXPORT AdaptedVideoTrackSource
~AdaptedVideoTrackSource() override;
protected:
// Allows derived classes to initialize |video_adapter_| with a custom
// Allows derived classes to initialize `video_adapter_` with a custom
// alignment.
explicit AdaptedVideoTrackSource(int required_alignment);
// Checks the apply_rotation() flag. If the frame needs rotation, and it is a


@ -81,7 +81,7 @@ void FeedbackParams::Add(const FeedbackParam& param) {
return;
}
if (Has(param)) {
// Param already in |this|.
// Param already in `this`.
return;
}
params_.push_back(param);


@ -78,7 +78,7 @@ struct RTC_EXPORT Codec {
bool Matches(const Codec& codec) const;
bool MatchesCapability(const webrtc::RtpCodecCapability& capability) const;
// Find the parameter for |name| and write the value to |out|.
// Find the parameter for `name` and write the value to `out`.
bool GetParam(const std::string& name, std::string* out) const;
bool GetParam(const std::string& name, int* out) const;
@ -92,8 +92,8 @@ struct RTC_EXPORT Codec {
bool HasFeedbackParam(const FeedbackParam& param) const;
void AddFeedbackParam(const FeedbackParam& param);
// Filter |this| feedback params such that only those shared by both |this|
// and |other| are kept.
// Filter `this` feedback params such that only those shared by both `this`
// and `other` are kept.
void IntersectFeedbackParams(const Codec& other);
virtual webrtc::RtpCodecParameters ToCodecParameters() const;
@ -176,7 +176,7 @@ struct RTC_EXPORT VideoCodec : public Codec {
bool operator!=(const VideoCodec& c) const { return !(*this == c); }
// Return packetization which both |local_codec| and |remote_codec| support.
// Return packetization which both `local_codec` and `remote_codec` support.
static absl::optional<std::string> IntersectPacketization(
const VideoCodec& local_codec,
const VideoCodec& remote_codec);
@ -202,7 +202,7 @@ struct RTC_EXPORT VideoCodec : public Codec {
void SetDefaultParameters();
};
// Get the codec setting associated with |payload_type|. If there
// Get the codec setting associated with `payload_type`. If there
// is no codec associated with that payload type it returns nullptr.
template <class Codec>
const Codec* FindCodecById(const std::vector<Codec>& codecs, int payload_type) {
@ -218,7 +218,7 @@ bool HasNack(const Codec& codec);
bool HasRemb(const Codec& codec);
bool HasRrtr(const Codec& codec);
bool HasTransportCc(const Codec& codec);
// Returns the first codec in |supported_codecs| that matches |codec|, or
// Returns the first codec in `supported_codecs` that matches `codec`, or
// nullptr if no codec matches.
const VideoCodec* FindMatchingCodec(
const std::vector<VideoCodec>& supported_codecs,

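The FindCodecById comment in the hunk above fully describes the lookup, but the template body is cut off by the diff view. Below is a minimal standalone sketch of that kind of lookup, assuming the codec struct exposes an integer `id` payload-type field; it is illustrative, not the file's actual definition.

#include <vector>

// Return the codec whose payload-type id equals `payload_type`, or nullptr if
// no codec in `codecs` uses that payload type.
template <class Codec>
const Codec* FindCodecByIdSketch(const std::vector<Codec>& codecs,
                                 int payload_type) {
  for (const Codec& codec : codecs) {
    if (codec.id == payload_type)
      return &codec;
  }
  return nullptr;
}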

@ -116,7 +116,7 @@ bool MediaChannel::DscpEnabled() const {
}
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
// enabled. It can be changed at any time via `SetPreferredDscp`.
rtc::DiffServCodePoint MediaChannel::PreferredDscp() const {
RTC_DCHECK_RUN_ON(network_thread_);
return preferred_dscp_;


@ -278,7 +278,7 @@ class MediaChannel {
bool DscpEnabled() const;
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
// enabled. It can be changed at any time via `SetPreferredDscp`.
rtc::DiffServCodePoint PreferredDscp() const;
void SetPreferredDscp(rtc::DiffServCodePoint new_dscp);
@ -655,7 +655,7 @@ struct BandwidthEstimationInfo {
int64_t bucket_delay = 0;
};
// Maps from payload type to |RtpCodecParameters|.
// Maps from payload type to `RtpCodecParameters`.
typedef std::map<int, webrtc::RtpCodecParameters> RtpCodecParametersMap;
struct VoiceMediaInfo {
@ -778,7 +778,7 @@ class VoiceMediaChannel : public MediaChannel, public Delayable {
cricket::MediaType media_type() const override;
virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
// Get the receive parameters for the incoming stream identified by |ssrc|.
// Get the receive parameters for the incoming stream identified by `ssrc`.
virtual webrtc::RtpParameters GetRtpReceiveParameters(
uint32_t ssrc) const = 0;
// Retrieve the receive parameters for the default receive
@ -799,9 +799,9 @@ class VoiceMediaChannel : public MediaChannel, public Delayable {
virtual bool SetDefaultOutputVolume(double volume) = 0;
// Returns if the telephone-event has been negotiated.
virtual bool CanInsertDtmf() = 0;
// Send a DTMF |event|. The DTMF out-of-band signal will be used.
// The |ssrc| should be either 0 or a valid send stream ssrc.
// The valid values for |event| are 0 to 15, which correspond to
// Send a DTMF `event`. The DTMF out-of-band signal will be used.
// The `ssrc` should be either 0 or a valid send stream ssrc.
// The valid values for `event` are 0 to 15, which correspond to
// DTMF event 0-9, *, #, A-D.
virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0;
// Gets quality stats for the channel.
@ -850,7 +850,7 @@ class VideoMediaChannel : public MediaChannel, public Delayable {
cricket::MediaType media_type() const override;
virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
// Get the receive parameters for the incoming stream identified by |ssrc|.
// Get the receive parameters for the incoming stream identified by `ssrc`.
virtual webrtc::RtpParameters GetRtpReceiveParameters(
uint32_t ssrc) const = 0;
// Retrieve the receive parameters for the default receive
@ -861,7 +861,7 @@ class VideoMediaChannel : public MediaChannel, public Delayable {
// Starts or stops transmission (and potentially capture) of local video.
virtual bool SetSend(bool send) = 0;
// Configure stream for sending and register a source.
// The |ssrc| must correspond to a registered send stream.
// The `ssrc` must correspond to a registered send stream.
virtual bool SetVideoSend(
uint32_t ssrc,
const VideoOptions* options,
@ -883,13 +883,13 @@ class VideoMediaChannel : public MediaChannel, public Delayable {
virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0;
// Gets quality stats for the channel.
virtual bool GetStats(VideoMediaInfo* info) = 0;
// Set recordable encoded frame callback for |ssrc|
// Set recordable encoded frame callback for `ssrc`
virtual void SetRecordableEncodedFrameCallback(
uint32_t ssrc,
std::function<void(const webrtc::RecordableEncodedFrame&)> callback) = 0;
// Clear recordable encoded frame callback for |ssrc|
// Clear recordable encoded frame callback for `ssrc`
virtual void ClearRecordableEncodedFrameCallback(uint32_t ssrc) = 0;
// Cause generation of a keyframe for |ssrc|
// Cause generation of a keyframe for `ssrc`
virtual void GenerateKeyFrame(uint32_t ssrc) = 0;
virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
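For the CanInsertDtmf()/InsertDtmf() comments earlier in this file's diff, a hedged usage sketch: send DTMF digit 5 for 100 ms on an already configured send stream. `voice_channel` and `ssrc` are assumed to come from an existing call, and the duration being in milliseconds is an assumption not stated in the comment.

// Illustrative fragment against the VoiceMediaChannel interface shown above.
bool SendDtmfDigitFive(cricket::VoiceMediaChannel* voice_channel, uint32_t ssrc) {
  if (!voice_channel->CanInsertDtmf())
    return false;  // telephone-event was not negotiated.
  // Event 5 corresponds to the DTMF digit "5"; duration assumed to be in ms.
  return voice_channel->InsertDtmf(ssrc, /*event=*/5, /*duration=*/100);
}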


@ -67,7 +67,7 @@ extern const char kCodecParamMaxPlaybackRate[];
extern const char kParamValueTrue[];
// Parameters are stored as parameter/value pairs. For parameters that do not
// have a value, |kParamValueEmpty| should be used as value.
// have a value, `kParamValueEmpty` should be used as value.
extern const char kParamValueEmpty[];
// opus parameters.


@ -69,7 +69,7 @@ void UpdateAbsSendTimeExtensionValue(uint8_t* extension_data,
extension_data[2] = static_cast<uint8_t>(send_time);
}
// Assumes |length| is actual packet length + tag length. Updates HMAC at end of
// Assumes `length` is actual packet length + tag length. Updates HMAC at end of
// the RTP packet.
void UpdateRtpAuthTag(uint8_t* rtp,
size_t length,
@ -359,7 +359,7 @@ bool ApplyPacketOptions(uint8_t* data,
RTC_DCHECK(data);
RTC_DCHECK(length);
// if there is no valid |rtp_sendtime_extension_id| and |srtp_auth_key| in
// if there is no valid `rtp_sendtime_extension_id` and `srtp_auth_key` in
// PacketOptions, nothing to be updated in this packet.
if (packet_time_params.rtp_sendtime_extension_id == -1 &&
packet_time_params.srtp_auth_key.empty()) {


@ -50,10 +50,10 @@ RtpPacketType InferRtpPacketType(rtc::ArrayView<const char> packet);
// True if |payload type| is 0-127.
bool IsValidRtpPayloadType(int payload_type);
// True if |size| is appropriate for the indicated packet type.
// True if `size` is appropriate for the indicated packet type.
bool IsValidRtpPacketSize(RtpPacketType packet_type, size_t size);
// Returns "RTCP", "RTP" or "Unknown" according to |packet_type|.
// Returns "RTCP", "RTP" or "Unknown" according to `packet_type`.
absl::string_view RtpPacketTypeToString(RtpPacketType packet_type);
// Verifies that a packet has a valid RTP header.
@ -67,7 +67,7 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
int extension_id,
uint64_t time_us);
// Applies specified |options| to the packet. It updates the absolute send time
// Applies specified `options` to the packet. It updates the absolute send time
// extension header if it is present, then updates HMAC.
bool RTC_EXPORT
ApplyPacketOptions(uint8_t* data,

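The two helpers commented in the hunk above are fully specified by those comments, so here is a self-contained sketch derived from them alone; the RtpPacketType enumerator names are assumptions for illustration.

enum class RtpPacketType { kRtp, kRtcp, kUnknown };  // enumerator names assumed

// "True if `payload_type` is 0-127."
bool IsValidRtpPayloadTypeSketch(int payload_type) {
  return payload_type >= 0 && payload_type <= 127;
}

// Returns "RTCP", "RTP" or "Unknown" according to `packet_type`.
const char* RtpPacketTypeToStringSketch(RtpPacketType packet_type) {
  switch (packet_type) {
    case RtpPacketType::kRtcp:
      return "RTCP";
    case RtpPacketType::kRtp:
      return "RTP";
    default:
      return "Unknown";
  }
}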

@ -67,9 +67,9 @@ static uint8_t kRtpMsgWithTwoByteAbsSendTimeExtension[] = {
};
// Index of AbsSendTimeExtn data in message
// |kRtpMsgWithOneByteAbsSendTimeExtension|.
// `kRtpMsgWithOneByteAbsSendTimeExtension`.
static const int kAstIndexInOneByteRtpMsg = 21;
// and in message |kRtpMsgWithTwoByteAbsSendTimeExtension|.
// and in message `kRtpMsgWithTwoByteAbsSendTimeExtension`.
static const int kAstIndexInTwoByteRtpMsg = 21;
static const rtc::ArrayView<const char> kPcmuFrameArrayView =


@ -17,18 +17,18 @@
namespace webrtc {
// Generate codec parameters that will be used as answer in an SDP negotiation
// based on local supported parameters and remote offered parameters. Both
// |local_supported_params|, |remote_offered_params|, and |answer_params|
// `local_supported_params`, `remote_offered_params`, and `answer_params`
// represent sendrecv media descriptions, i.e. they are a mix of both encode and
// decode capabilities. In theory, when the profile in |local_supported_params|
// represents a strict superset of the profile in |remote_offered_params|, we
// could limit the profile in |answer_params| to the profile in
// |remote_offered_params|. However, to simplify the code, each supported H264
// decode capabilities. In theory, when the profile in `local_supported_params`
// represents a strict superset of the profile in `remote_offered_params`, we
// could limit the profile in `answer_params` to the profile in
// `remote_offered_params`. However, to simplify the code, each supported H264
// profile should be listed explicitly in the list of local supported codecs,
// even if they are redundant. Then each local codec in the list should be
// tested one at a time against the remote codec, and only when the profiles are
// equal should this function be called. Therefore, this function does not need
// to handle profile intersection, and the profile of |local_supported_params|
// and |remote_offered_params| must be equal before calling this function. The
// to handle profile intersection, and the profile of `local_supported_params`
// and `remote_offered_params` must be equal before calling this function. The
// parameters that are used when negotiating are the level part of
// profile-level-id and level-asymmetry-allowed.
void H264GenerateProfileLevelIdForAnswer(


@ -35,7 +35,7 @@ inline std::vector<T> MakeVector(const T a[], size_t s) {
}
#define MAKE_VECTOR(a) cricket::MakeVector(a, arraysize(a))
// Checks whether |codecs| contains |codec|; checks using Codec::Matches().
// Checks whether `codecs` contains `codec`; checks using Codec::Matches().
template <class C>
bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
typename std::vector<C>::const_iterator it;
@ -47,11 +47,11 @@ bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
return false;
}
// Create Simulcast StreamParams with given |ssrcs| and |cname|.
// Create Simulcast StreamParams with given `ssrcs` and `cname`.
cricket::StreamParams CreateSimStreamParams(const std::string& cname,
const std::vector<uint32_t>& ssrcs);
// Create Simulcast stream with given |ssrcs| and |rtx_ssrcs|.
// The number of |rtx_ssrcs| must match number of |ssrcs|.
// Create Simulcast stream with given `ssrcs` and `rtx_ssrcs`.
// The number of `rtx_ssrcs` must match number of `ssrcs`.
cricket::StreamParams CreateSimWithRtxStreamParams(
const std::string& cname,
const std::vector<uint32_t>& ssrcs,


@ -36,14 +36,14 @@ struct Fraction {
}
// Determines number of output pixels if both width and height of an input of
// |input_pixels| pixels is scaled with the fraction numerator / denominator.
// `input_pixels` pixels is scaled with the fraction numerator / denominator.
int scale_pixel_count(int input_pixels) {
return (numerator * numerator * input_pixels) / (denominator * denominator);
}
};
// Round |value_to_round| to a multiple of |multiple|. Prefer rounding upwards,
// but never more than |max_value|.
// Round `value_to_round` to a multiple of `multiple`. Prefer rounding upwards,
// but never more than `max_value`.
int roundUp(int value_to_round, int multiple, int max_value) {
const int rounded_value =
(value_to_round + multiple - 1) / multiple * multiple;
@ -51,8 +51,8 @@ int roundUp(int value_to_round, int multiple, int max_value) {
: (max_value / multiple * multiple);
}
// Generates a scale factor that makes |input_pixels| close to |target_pixels|,
// but no higher than |max_pixels|.
// Generates a scale factor that makes `input_pixels` close to `target_pixels`,
// but no higher than `max_pixels`.
Fraction FindScale(int input_width,
int input_height,
int target_pixels,
@ -73,7 +73,7 @@ Fraction FindScale(int input_width,
Fraction best_scale = Fraction{1, 1};
if (variable_start_scale_factor) {
// Start scaling down by 2/3 depending on |input_width| and |input_height|.
// Start scaling down by 2/3 depending on `input_width` and `input_height`.
if (input_width % 3 == 0 && input_height % 3 == 0) {
// 2/3 (then alternates 3/4, 2/3, 3/4,...).
current_scale = Fraction{6, 6};
@ -152,7 +152,7 @@ bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
if (max_fps <= 0)
return false;
// If |max_framerate_request_| is not set, it will default to maxint, which
// If `max_framerate_request_` is not set, it will default to maxint, which
// will lead to a frame_interval_ns rounded to 0.
int64_t frame_interval_ns = rtc::kNumNanosecsPerSec / max_fps;
if (frame_interval_ns <= 0) {
@ -356,7 +356,7 @@ int VideoAdapter::GetTargetPixels() const {
float VideoAdapter::GetMaxFramerate() const {
webrtc::MutexLock lock(&mutex_);
// Minimum of |max_fps_| and |max_framerate_request_| is used to throttle
// Minimum of `max_fps_` and `max_framerate_request_` is used to throttle
// frame-rate.
int framerate = std::min(max_framerate_request_,
max_fps_.value_or(max_framerate_request_));
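The roundUp() hunk above is split across two fragments, so the rule it implements is restated here as a standalone sketch together with a few worked values; the comparison against `max_value` is inferred from the comment "never more than `max_value`".

int RoundUpSketch(int value_to_round, int multiple, int max_value) {
  const int rounded_value =
      (value_to_round + multiple - 1) / multiple * multiple;
  // Prefer rounding up, but never past max_value; otherwise fall back to the
  // largest multiple that still fits.
  return rounded_value <= max_value ? rounded_value
                                    : (max_value / multiple * multiple);
}
// RoundUpSketch(35, 16, 64) == 48  (35 rounds up to the next multiple of 16)
// RoundUpSketch(60, 16, 64) == 64  (rounding up still fits under max_value)
// RoundUpSketch(62, 16, 63) == 48  (64 would exceed 63, so use 63 / 16 * 16)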


@ -33,7 +33,7 @@ class RTC_EXPORT VideoAdapter {
public:
VideoAdapter();
// The source requests output frames whose width and height are divisible
// by |source_resolution_alignment|.
// by `source_resolution_alignment`.
explicit VideoAdapter(int source_resolution_alignment);
virtual ~VideoAdapter();
@ -52,7 +52,7 @@ class RTC_EXPORT VideoAdapter {
// DEPRECATED. Please use OnOutputFormatRequest below.
// TODO(asapersson): Remove this once it is no longer used.
// Requests the output frame size and frame interval from
// |AdaptFrameResolution| to not be larger than |format|. Also, the input
// `AdaptFrameResolution` to not be larger than `format`. Also, the input
// frame size will be cropped to match the requested aspect ratio. The
// requested aspect ratio is orientation agnostic and will be adjusted to
// maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
@ -61,13 +61,13 @@ class RTC_EXPORT VideoAdapter {
void OnOutputFormatRequest(const absl::optional<VideoFormat>& format)
RTC_LOCKS_EXCLUDED(mutex_);
// Requests output frame size and frame interval from |AdaptFrameResolution|.
// |target_aspect_ratio|: The input frame size will be cropped to match the
// Requests output frame size and frame interval from `AdaptFrameResolution`.
// `target_aspect_ratio`: The input frame size will be cropped to match the
// requested aspect ratio. The aspect ratio is orientation agnostic and will
// be adjusted to maintain the input orientation (i.e. it doesn't matter if
// e.g. <1280,720> or <720,1280> is requested).
// |max_pixel_count|: The maximum output frame size.
// |max_fps|: The maximum output framerate.
// `max_pixel_count`: The maximum output frame size.
// `max_fps`: The maximum output framerate.
// Note: Should be called from the source only.
void OnOutputFormatRequest(
const absl::optional<std::pair<int, int>>& target_aspect_ratio,
@ -85,7 +85,7 @@ class RTC_EXPORT VideoAdapter {
const absl::optional<int>& max_portrait_pixel_count,
const absl::optional<int>& max_fps) RTC_LOCKS_EXCLUDED(mutex_);
// Requests the output frame size from |AdaptFrameResolution| to have as close
// Requests the output frame size from `AdaptFrameResolution` to have as close
// as possible to |sink_wants.target_pixel_count| pixels (if set)
// but no more than |sink_wants.max_pixel_count|.
// |sink_wants.max_framerate_fps| is essentially analogous to
@ -123,7 +123,7 @@ class RTC_EXPORT VideoAdapter {
// The fixed source resolution alignment requirement.
const int source_resolution_alignment_;
// The currently applied resolution alignment, as given by the requirements:
// - the fixed |source_resolution_alignment_|; and
// - the fixed `source_resolution_alignment_`; and
// - the latest |sink_wants.resolution_alignment|.
int resolution_alignment_ RTC_GUARDED_BY(mutex_);
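A hedged usage sketch of the three-argument OnOutputFormatRequest() overload documented above; the exact parameter list (target aspect ratio, max pixel count, max fps) is assumed from the comments, since the declaration is truncated in this view. The call requests 16:9 output capped at 1280x720 pixels and 30 fps.

#include <utility>

// Illustrative fragment; `adapter` is a live cricket::VideoAdapter owned by a
// video source (the request should come from the source side).
void RequestHd30(cricket::VideoAdapter& adapter) {
  adapter.OnOutputFormatRequest(
      /*target_aspect_ratio=*/std::make_pair(16, 9),
      /*max_pixel_count=*/1280 * 720,
      /*max_fps=*/30);
}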


@ -30,7 +30,7 @@ void VideoBroadcaster::AddOrUpdateSink(
RTC_DCHECK(sink != nullptr);
webrtc::MutexLock lock(&sinks_and_wants_lock_);
if (!FindSinkPair(sink)) {
// |Sink| is a new sink, which didn't receive previous frame.
// `Sink` is a new sink, which didn't receive previous frame.
previous_frame_sent_to_all_sinks_ = false;
}
VideoSourceBase::AddOrUpdateSink(sink, wants);


@ -213,10 +213,10 @@ struct RTC_EXPORT VideoFormat : VideoFormatPod {
std::string ToString() const;
};
// Returns the largest positive integer that divides both |a| and |b|.
// Returns the largest positive integer that divides both `a` and `b`.
int GreatestCommonDivisor(int a, int b);
// Returns the smallest positive integer that is divisible by both |a| and |b|.
// Returns the smallest positive integer that is divisible by both `a` and `b`.
int LeastCommonMultiple(int a, int b);
} // namespace cricket
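GreatestCommonDivisor() and LeastCommonMultiple() are declared above without bodies; a minimal sketch of the standard implementations (Euclid's algorithm, and the lcm computed via the gcd) is shown for reference. The real definitions live in the corresponding .cc file.

// Euclid's algorithm: gcd(a, 0) == a, gcd(a, b) == gcd(b, a % b).
int GreatestCommonDivisorSketch(int a, int b) {
  while (b != 0) {
    const int remainder = a % b;
    a = b;
    b = remainder;
  }
  return a;
}

// lcm(a, b) == a / gcd(a, b) * b; divide first to reduce overflow risk.
int LeastCommonMultipleSketch(int a, int b) {
  return a / GreatestCommonDivisorSketch(a, b) * b;
}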


@ -42,7 +42,7 @@ namespace webrtc {
// - Select "multiplex" codec in SDP negotiation.
class RTC_EXPORT MultiplexEncoderFactory : public VideoEncoderFactory {
public:
// |supports_augmenting_data| defines if the encoder would support augmenting
// `supports_augmenting_data` defines if the encoder would support augmenting
// data. If set, the encoder expects to receive video frame buffers of type
// AugmentedVideoFrameBuffer.
MultiplexEncoderFactory(std::unique_ptr<VideoEncoderFactory> factory,
@ -59,7 +59,7 @@ class RTC_EXPORT MultiplexEncoderFactory : public VideoEncoderFactory {
class RTC_EXPORT MultiplexDecoderFactory : public VideoDecoderFactory {
public:
// |supports_augmenting_data| defines if the decoder would support augmenting
// `supports_augmenting_data` defines if the decoder would support augmenting
// data. If set, the decoder is expected to output video frame buffers of type
// AugmentedVideoFrameBuffer.
MultiplexDecoderFactory(std::unique_ptr<VideoDecoderFactory> factory,


@ -27,12 +27,12 @@ class PayloadTypeMapper {
PayloadTypeMapper();
~PayloadTypeMapper();
// Finds the current payload type for |format| or assigns a new one, if no
// Finds the current payload type for `format` or assigns a new one, if no
// current mapping exists. Will return an empty value if it was unable to
// create a mapping, i.e. if all dynamic payload type ids have been used up.
absl::optional<int> GetMappingFor(const webrtc::SdpAudioFormat& format);
// Finds the current payload type for |format|, if any. Returns an empty value
// Finds the current payload type for `format`, if any. Returns an empty value
// if no payload type mapping exists for the format.
absl::optional<int> FindMappingFor(
const webrtc::SdpAudioFormat& format) const;
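A hedged usage sketch of the mapper API above: request (or create) a payload type for Opus and detect exhaustion of the dynamic payload-type range. The SdpAudioFormat constructor arguments (name, clock rate, channel count) are the usual ones.

// Illustrative fragment; returns the mapped payload type, or an empty value
// when all dynamic payload type ids are already in use.
absl::optional<int> MapOpusSketch(cricket::PayloadTypeMapper& mapper) {
  const webrtc::SdpAudioFormat opus("opus", 48000, 2);
  return mapper.GetMappingFor(opus);
}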


@ -71,16 +71,16 @@ struct SimulcastFormat {
int width;
int height;
// The maximum number of simulcast layers that can be used for
// resolutions at |widthxheight| for legacy applications.
// resolutions at `widthxheight` for legacy applications.
size_t max_layers;
// The maximum bitrate for encoding stream at |widthxheight|, when we are
// The maximum bitrate for encoding stream at `widthxheight`, when we are
// not sending the next higher spatial stream.
webrtc::DataRate max_bitrate;
// The target bitrate for encoding stream at |widthxheight|, when this layer
// The target bitrate for encoding stream at `widthxheight`, when this layer
// is not the highest layer (i.e., when we are sending another higher spatial
// stream).
webrtc::DataRate target_bitrate;
// The minimum bitrate needed for encoding stream at |widthxheight|.
// The minimum bitrate needed for encoding stream at `widthxheight`.
webrtc::DataRate min_bitrate;
};
@ -210,7 +210,7 @@ SimulcastFormat InterpolateSimulcastFormat(
const float rate = (total_pixels_up - total_pixels) /
static_cast<float>(total_pixels_up - total_pixels_down);
// Use upper resolution if |rate| is below the configured threshold.
// Use upper resolution if `rate` is below the configured threshold.
size_t max_layers = (rate < max_roundup_rate.value_or(kDefaultMaxRoundupRate))
? formats[index - 1].max_layers
: formats[index].max_layers;
@ -296,7 +296,7 @@ size_t LimitSimulcastLayerCount(int width,
"Disabled")) {
// Max layers from one higher resolution in kSimulcastFormats will be used
// if the ratio (pixels_up - pixels) / (pixels_up - pixels_down) is less
// than configured |max_ratio|. pixels_down is the selected index in
// than configured `max_ratio`. pixels_down is the selected index in
// kSimulcastFormats based on pixels.
webrtc::FieldTrialOptional<double> max_ratio("max_ratio");
webrtc::ParseFieldTrial({&max_ratio},
@ -369,8 +369,8 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
// 1|.
width = NormalizeSimulcastSize(width, layer_count);
height = NormalizeSimulcastSize(height, layer_count);
// Add simulcast streams, from highest resolution (|s| = num_simulcast_layers
// -1) to lowest resolution at |s| = 0.
// Add simulcast streams, from highest resolution (`s` = num_simulcast_layers
// -1) to lowest resolution at `s` = 0.
for (size_t s = layer_count - 1;; --s) {
layers[s].width = width;
layers[s].height = height;


@ -21,12 +21,12 @@
namespace cricket {
// Gets the total maximum bitrate for the |streams|.
// Gets the total maximum bitrate for the `streams`.
webrtc::DataRate GetTotalMaxBitrate(
const std::vector<webrtc::VideoStream>& streams);
// Adds any bitrate of |max_bitrate| that is above the total maximum bitrate for
// the |layers| to the highest quality layer.
// Adds any bitrate of `max_bitrate` that is above the total maximum bitrate for
// the `layers` to the highest quality layer.
void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate,
std::vector<webrtc::VideoStream>* layers);
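A simplified sketch of the rule the BoostMaxSimulcastLayer() comment describes, using plain bits-per-second integers instead of webrtc::DataRate and webrtc::VideoStream: whatever part of the session maximum exceeds the summed per-layer maximums is handed to the highest-quality (last) layer.

#include <cstdint>
#include <vector>

void BoostHighestLayerSketch(int64_t max_bitrate_bps,
                             std::vector<int64_t>* layer_max_bitrate_bps) {
  if (layer_max_bitrate_bps->empty())
    return;
  int64_t total_bps = 0;
  for (int64_t bps : *layer_max_bitrate_bps)
    total_bps += bps;
  if (max_bitrate_bps > total_bps) {
    // Give the leftover headroom to the highest-quality layer.
    layer_max_bitrate_bps->back() += max_bitrate_bps - total_bps;
  }
}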


@ -287,7 +287,7 @@ int SimulcastEncoderAdapter::Release() {
RTC_DCHECK_RUN_ON(&encoder_queue_);
while (!stream_contexts_.empty()) {
// Move the encoder instances and put them on the |cached_encoder_contexts_|
// Move the encoder instances and put them on the `cached_encoder_contexts_`
// where they may possibly be reused from (ordering does not matter).
cached_encoder_contexts_.push_front(
std::move(stream_contexts_.back()).ReleaseEncoderContext());
@ -415,7 +415,7 @@ int SimulcastEncoderAdapter::InitEncode(
}
// Intercept frame encode complete callback only for upper streams, where
// we need to set a correct stream index. Set |parent| to nullptr for the
// we need to set a correct stream index. Set `parent` to nullptr for the
// lowest stream to bypass the callback.
SimulcastEncoderAdapter* parent = stream_idx > 0 ? this : nullptr;
@ -699,8 +699,8 @@ SimulcastEncoderAdapter::FetchOrCreateEncoderContext(
is_lowest_quality_stream &&
prefer_temporal_support_on_base_layer_;
// Toggling of |prefer_temporal_support| requires encoder recreation. Find
// and reuse encoder with desired |prefer_temporal_support|. Otherwise, if
// Toggling of `prefer_temporal_support` requires encoder recreation. Find
// and reuse encoder with desired `prefer_temporal_support`. Otherwise, if
// there is no such encoder in the cache, create a new instance.
auto encoder_context_iter =
std::find_if(cached_encoder_contexts_.begin(),
@ -769,7 +769,7 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec(
codec_params.VP8()->numberOfTemporalLayers =
stream_params.numberOfTemporalLayers;
if (!is_highest_quality_stream) {
// For resolutions below CIF, set the codec |complexity| parameter to
// For resolutions below CIF, set the codec `complexity` parameter to
// kComplexityHigher, which maps to cpu_used = -4.
int pixels_per_frame = codec_params.width * codec_params.height;
if (pixels_per_frame < 352 * 288) {


@ -43,8 +43,8 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder {
// TODO(bugs.webrtc.org/11000): Remove when downstream usage is gone.
SimulcastEncoderAdapter(VideoEncoderFactory* primarty_factory,
const SdpVideoFormat& format);
// |primary_factory| produces the first-choice encoders to use.
// |fallback_factory|, if non-null, is used to create fallback encoder that
// `primary_factory` produces the first-choice encoders to use.
// `fallback_factory`, if non-null, is used to create fallback encoder that
// will be used if InitEncode() fails for the primary encoder.
SimulcastEncoderAdapter(VideoEncoderFactory* primary_factory,
VideoEncoderFactory* fallback_factory,
@ -147,7 +147,7 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder {
void DestroyStoredEncoders();
// This method creates an encoder. May reuse previously created encoders from
// |cached_encoder_contexts_|. It's const because it's used from
// `cached_encoder_contexts_`. It's const because it's used from
// const GetEncoderInfo().
std::unique_ptr<EncoderContext> FetchOrCreateEncoderContext(
bool is_lowest_quality_stream) const;
@ -182,7 +182,7 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder {
// Store previously created and released encoders, so they don't have to be
// recreated. Remaining encoders are destroyed by the destructor.
// Marked as |mutable| because we may need to temporarily create an encoder in
// Marked as `mutable` because we may need to temporarily create an encoder in
// GetEncoderInfo(), which is const.
mutable std::list<std::unique_ptr<EncoderContext>> cached_encoder_contexts_;


@ -186,7 +186,7 @@ class MockVideoEncoderFactory : public VideoEncoderFactory {
int32_t init_encode_return_value_ = 0;
std::vector<MockVideoEncoder*> encoders_;
std::vector<const char*> encoder_names_;
// Keep number of entries in sync with |kMaxSimulcastStreams|.
// Keep number of entries in sync with `kMaxSimulcastStreams`.
std::vector<int> requested_resolution_alignments_ = {1, 1, 1};
bool supports_simulcast_ = false;
};
@ -387,7 +387,7 @@ class TestSimulcastEncoderAdapterFakeHelper {
video_format_(video_format) {}
// Can only be called once as the SimulcastEncoderAdapter will take the
// ownership of |factory_|.
// ownership of `factory_`.
VideoEncoder* CreateMockEncoderAdapter() {
return new SimulcastEncoderAdapter(primary_factory_.get(),
fallback_factory_.get(), video_format_);
@ -433,8 +433,8 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
void ReSetUp() {
if (adapter_) {
adapter_->Release();
// |helper_| owns factories which |adapter_| needs to destroy encoders.
// Release |adapter_| before |helper_| (released in SetUp()).
// `helper_` owns factories which `adapter_` needs to destroy encoders.
// Release `adapter_` before `helper_` (released in SetUp()).
adapter_.reset();
}
SetUp();
@ -755,7 +755,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, DoesNotLeakEncoders) {
EXPECT_EQ(3u, helper_->factory()->encoders().size());
// The adapter should destroy all encoders it has allocated. Since
// |helper_->factory()| is owned by |adapter_|, however, we need to rely on
// |helper_->factory()| is owned by `adapter_`, however, we need to rely on
// lsan to find leaks here.
EXPECT_EQ(0, adapter_->Release());
adapter_.reset();


@ -35,7 +35,7 @@ void UnhandledPacketsBuffer::AddPacket(uint32_t ssrc,
insert_pos_ = (insert_pos_ + 1) % kMaxStashedPackets;
}
// Backfill |consumer| with all stored packet related |ssrcs|.
// Backfill `consumer` with all stored packet related `ssrcs`.
void UnhandledPacketsBuffer::BackfillPackets(
rtc::ArrayView<const uint32_t> ssrcs,
std::function<void(uint32_t, int64_t, rtc::CopyOnWriteBuffer)> consumer) {


@ -35,7 +35,7 @@ class UnhandledPacketsBuffer {
int64_t packet_time_us,
rtc::CopyOnWriteBuffer packet);
// Feed all packets with |ssrcs| into |consumer|.
// Feed all packets with `ssrcs` into `consumer`.
void BackfillPackets(
rtc::ArrayView<const uint32_t> ssrcs,
std::function<void(uint32_t, int64_t, rtc::CopyOnWriteBuffer)> consumer);
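A hedged usage sketch of BackfillPackets(): replay every stashed packet for the given SSRCs into a handler lambda. `HandlePacket` is a hypothetical stand-in for whatever actually delivers the packet.

// Hypothetical delivery hook, not a real WebRTC function.
void HandlePacket(uint32_t ssrc, int64_t packet_time_us,
                  rtc::CopyOnWriteBuffer packet);

// Illustrative fragment against the interface shown above.
void ReplayStashedPackets(cricket::UnhandledPacketsBuffer& buffer,
                          rtc::ArrayView<const uint32_t> ssrcs) {
  buffer.BackfillPackets(
      ssrcs, [](uint32_t ssrc, int64_t packet_time_us,
                rtc::CopyOnWriteBuffer packet) {
        HandlePacket(ssrc, packet_time_us, std::move(packet));
      });
}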


@ -448,7 +448,7 @@ MergeInfoAboutOutboundRtpSubstreams(
webrtc::VideoSendStream::StreamStats& rtp_substream =
rtp_substreams[media_ssrc];
// We only merge |rtp_stats|. All other metrics are not applicable for RTX
// We only merge `rtp_stats`. All other metrics are not applicable for RTX
// and FlexFEC.
// TODO(hbos): kRtx and kFlexfec stats should use a separate struct to make
// it clear what is or is not applicable.
@ -1543,7 +1543,7 @@ void WebRtcVideoChannel::ConfigureReceiverRtp(
flexfec_config->protected_media_ssrcs = {ssrc};
flexfec_config->rtp.local_ssrc = config->rtp.local_ssrc;
flexfec_config->rtcp_mode = config->rtp.rtcp_mode;
// TODO(brandtr): We should be spec-compliant and set |transport_cc| here
// TODO(brandtr): We should be spec-compliant and set `transport_cc` here
// based on the rtcp-fb for the FlexFEC codec, not the media codec.
flexfec_config->rtp.transport_cc = config->rtp.transport_cc;
flexfec_config->rtp.extensions = config->rtp.extensions;
@ -1573,7 +1573,7 @@ void WebRtcVideoChannel::ResetUnsignaledRecvStream() {
last_unsignalled_ssrc_creation_time_ms_ = absl::nullopt;
// Delete any created default streams. This is needed to avoid SSRC collisions
// in Call's RtpDemuxer, in the case that |this| has created a default video
// in Call's RtpDemuxer, in the case that `this` has created a default video
// receiver, and then some other WebRtcVideoChannel gets the SSRC signaled
// in the corresponding Unified Plan "m=" section.
auto it = receive_streams_.begin();
@ -2179,7 +2179,7 @@ webrtc::DegradationPreference
WebRtcVideoChannel::WebRtcVideoSendStream::GetDegradationPreference() const {
// Do not adapt resolution for screen content as this will likely
// result in blurry and unreadable text.
// |this| acts like a VideoSource to make sure SinkWants are handled on the
// `this` acts like a VideoSource to make sure SinkWants are handled on the
// correct thread.
if (!enable_cpu_overuse_detection_) {
return webrtc::DegradationPreference::DISABLED;
@ -2263,7 +2263,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetCodec(
void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters(
const ChangedSendParameters& params) {
RTC_DCHECK_RUN_ON(&thread_checker_);
// |recreate_stream| means construction-time parameters have changed and the
// `recreate_stream` means construction-time parameters have changed and the
// sending stream needs to be reset with the new config.
bool recreate_stream = false;
if (params.rtcp_mode) {
@ -2552,7 +2552,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() {
RTC_DCHECK_RUN_ON(&thread_checker_);
if (!stream_) {
// The webrtc::VideoSendStream |stream_| has not yet been created but other
// The webrtc::VideoSendStream `stream_` has not yet been created but other
// parameters have changed.
return;
}
@ -2632,8 +2632,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos(
common_info.aggregated_framerate_sent = stats.encode_frame_rate;
common_info.aggregated_huge_frames_sent = stats.huge_frames_sent;
// If we don't have any substreams, get the remaining metrics from |stats|.
// Otherwise, these values are obtained from |sub_stream| below.
// If we don't have any substreams, get the remaining metrics from `stats`.
// Otherwise, these values are obtained from `sub_stream` below.
if (stats.substreams.empty()) {
for (uint32_t ssrc : parameters_.config.rtp.ssrcs) {
common_info.add_ssrc(ssrc);
@ -2998,7 +2998,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters(
config_.rtp.nack.rtp_history_ms = nack_history_ms;
config_.rtp.transport_cc = transport_cc_enabled;
config_.rtp.rtcp_mode = rtcp_mode;
// TODO(brandtr): We should be spec-compliant and set |transport_cc| here
// TODO(brandtr): We should be spec-compliant and set `transport_cc` here
// based on the rtcp-fb for the FlexFEC codec, not the media codec.
flexfec_config_.rtp.transport_cc = config_.rtp.transport_cc;
flexfec_config_.rtcp_mode = config_.rtp.rtcp_mode;
@ -3298,7 +3298,7 @@ WebRtcVideoChannel::MapCodecs(const std::vector<VideoCodec>& codecs) {
std::vector<VideoCodecSettings> video_codecs;
std::map<int, VideoCodec::CodecType> payload_codec_type;
// |rtx_mapping| maps video payload type to rtx payload type.
// `rtx_mapping` maps video payload type to rtx payload type.
std::map<int, int> rtx_mapping;
std::map<int, int> rtx_time_mapping;


@ -218,7 +218,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const override;
// Take the buffered packets for |ssrcs| and feed them into DeliverPacket.
// Take the buffered packets for `ssrcs` and feed them into DeliverPacket.
// This method does nothing unless unknown_ssrc_packet_buffer_ is configured.
void BackfillBufferedPackets(rtc::ArrayView<const uint32_t> ssrcs);
@ -258,12 +258,12 @@ class WebRtcVideoChannel : public VideoMediaChannel,
VideoCodecSettings();
// Checks if all members of |*this| are equal to the corresponding members
// of |other|.
// of `other`.
bool operator==(const VideoCodecSettings& other) const;
bool operator!=(const VideoCodecSettings& other) const;
// Checks if all members of |a|, except |flexfec_payload_type|, are equal
// to the corresponding members of |b|.
// Checks if all members of `a`, except `flexfec_payload_type`, are equal
// to the corresponding members of `b`.
static bool EqualsDisregardingFlexfec(const VideoCodecSettings& a,
const VideoCodecSettings& b);
@ -290,7 +290,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
// These optionals are unset if not changed.
absl::optional<std::vector<VideoCodecSettings>> codec_settings;
absl::optional<std::vector<webrtc::RtpExtension>> rtp_header_extensions;
// Keep track of the FlexFEC payload type separately from |codec_settings|.
// Keep track of the FlexFEC payload type separately from `codec_settings`.
// This allows us to recreate the FlexfecReceiveStream separately from the
// VideoReceiveStream when the FlexFEC payload type is changed.
absl::optional<int> flexfec_payload_type;
@ -389,8 +389,8 @@ class WebRtcVideoChannel : public VideoMediaChannel,
const VideoCodec& codec) const;
void ReconfigureEncoder();
// Calls Start or Stop according to whether or not |sending_| is true,
// and whether or not the encoding in |rtp_parameters_| is active.
// Calls Start or Stop according to whether or not `sending_` is true,
// and whether or not the encoding in `rtp_parameters_` is active.
void UpdateSendState();
webrtc::DegradationPreference GetDegradationPreference() const
@ -494,7 +494,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
webrtc::Call* const call_;
const StreamParams stream_params_;
// Both |stream_| and |flexfec_stream_| are managed by |this|. They are
// Both `stream_` and `flexfec_stream_` are managed by `this`. They are
// destroyed by calling call_->DestroyVideoReceiveStream and
// call_->DestroyFlexfecReceiveStream, respectively.
webrtc::VideoReceiveStream* stream_;
@ -577,8 +577,8 @@ class WebRtcVideoChannel : public VideoMediaChannel,
// criteria because the streams live on the worker thread and the demuxer
// lives on the network thread. Because packets are posted from the network
// thread to the worker thread, they can still be in-flight when streams are
// reconfigured. This can happen when |demuxer_criteria_id_| and
// |demuxer_criteria_completed_id_| don't match. During this time, we do not
// reconfigured. This can happen when `demuxer_criteria_id_` and
// `demuxer_criteria_completed_id_` don't match. During this time, we do not
// want to create unsignalled receive streams and should instead drop the
// packets. E.g:
// * If RemoveRecvStream(old_ssrc) was recently called, there may be packets


@ -127,8 +127,8 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec,
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}
// Return true if any codec in |codecs| is an RTX codec with associated payload
// type |payload_type|.
// Return true if any codec in `codecs` is an RTX codec with associated payload
// type `payload_type`.
bool HasRtxCodec(const std::vector<cricket::VideoCodec>& codecs,
int payload_type) {
for (const cricket::VideoCodec& codec : codecs) {
@ -1102,7 +1102,7 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) {
// Tests when GetSources is called with non-existing ssrc, it will return an
// empty list of RtpSource without crashing.
TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) {
// Set up a recv stream with |kSsrc|.
// Set up a recv stream with `kSsrc`.
AddSupportedVideoCodecType("VP8");
cricket::VideoRecvParameters parameters;
parameters.codecs.push_back(GetEngineCodec("VP8"));
@ -1128,7 +1128,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullFactories) {
}
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) {
// |engine| takes ownership of the factories.
// `engine` takes ownership of the factories.
webrtc::MockVideoEncoderFactory* encoder_factory =
new webrtc::MockVideoEncoderFactory();
webrtc::MockVideoDecoderFactory* decoder_factory =
@ -1151,7 +1151,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) {
// from the engine and that we will create a Vp8 encoder and decoder using the
// new factories.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
// |engine| takes ownership of the factories.
// `engine` takes ownership of the factories.
webrtc::MockVideoEncoderFactory* encoder_factory =
new webrtc::MockVideoEncoderFactory();
webrtc::MockVideoDecoderFactory* decoder_factory =
@ -1207,7 +1207,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
VerifyCodecHasDefaultFeedbackParams(engine_codecs.at(0),
/*lntf_expected=*/false);
// Mock encoder creation. |engine| takes ownership of the encoder.
// Mock encoder creation. `engine` takes ownership of the encoder.
webrtc::VideoEncoderFactory::CodecInfo codec_info;
codec_info.has_internal_source = false;
const webrtc::SdpVideoFormat format("VP8");
@ -1219,7 +1219,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
return std::make_unique<FakeWebRtcVideoEncoder>(nullptr);
});
// Mock decoder creation. |engine| takes ownership of the decoder.
// Mock decoder creation. `engine` takes ownership of the decoder.
EXPECT_CALL(*decoder_factory, CreateVideoDecoder(format)).WillOnce([] {
return std::make_unique<FakeWebRtcVideoDecoder>(nullptr);
});
@ -1276,7 +1276,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
// Test behavior when decoder factory fails to create a decoder (returns null).
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullDecoder) {
// |engine| takes ownership of the factories.
// `engine` takes ownership of the factories.
webrtc::MockVideoEncoderFactory* encoder_factory =
new webrtc::MockVideoEncoderFactory();
webrtc::MockVideoDecoderFactory* decoder_factory =
@ -1373,7 +1373,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
options.video_noise_reduction.emplace(false);
EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
// Change back to regular video content, update encoder. Also change
// a non |is_screencast| option just to verify it doesn't affect recreation.
// a non `is_screencast` option just to verify it doesn't affect recreation.
frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(3));
EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo,
@ -3573,7 +3573,7 @@ TEST_F(WebRtcVideoChannelTest, SetIdenticalOptionsDoesntReconfigureEncoder) {
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder));
EXPECT_EQ(1, send_stream->num_encoder_reconfigurations());
// Change |options| and expect 2 reconfigurations.
// Change `options` and expect 2 reconfigurations.
options.video_noise_reduction = true;
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder));
EXPECT_EQ(2, send_stream->num_encoder_reconfigurations());
@ -4367,7 +4367,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) {
EXPECT_EQ(video_stream_config.rtp.rtcp_mode, flexfec_stream_config.rtcp_mode);
EXPECT_EQ(video_stream_config.rtcp_send_transport,
flexfec_stream_config.rtcp_send_transport);
// TODO(brandtr): Update this EXPECT when we set |transport_cc| in a
// TODO(brandtr): Update this EXPECT when we set `transport_cc` in a
// spec-compliant way.
EXPECT_EQ(video_stream_config.rtp.transport_cc,
flexfec_stream_config.rtp.transport_cc);
@ -7476,7 +7476,7 @@ TEST_F(WebRtcVideoChannelTest,
&frame_forwarder));
channel_->SetSend(true);
// Set |scale_resolution_down_by|'s.
// Set `scale_resolution_down_by`'s.
auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_);
ASSERT_EQ(rtp_parameters.encodings.size(), 3u);
rtp_parameters.encodings[0].scale_resolution_down_by = 1.0;
@ -7632,7 +7632,7 @@ TEST_F(WebRtcVideoChannelTest,
&frame_forwarder));
channel_->SetSend(true);
// Set |scale_resolution_down_by|'s.
// Set `scale_resolution_down_by`'s.
auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_);
ASSERT_EQ(rtp_parameters.encodings.size(), 3u);
rtp_parameters.encodings[0].scale_resolution_down_by = 1.0;
@ -7868,7 +7868,7 @@ TEST_F(WebRtcVideoChannelTest,
// FakeVideoSendStream calls CreateEncoderStreams, test that the vector of
// VideoStreams are created appropriately for the simulcast case.
// The maximum |max_framerate| is used, kDefaultVideoMaxFramerate: 60.
// The maximum `max_framerate` is used, kDefaultVideoMaxFramerate: 60.
EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size());
EXPECT_EQ(15, stream->GetVideoStreams()[0].max_framerate);
EXPECT_EQ(kDefaultVideoMaxFramerate,
@ -8640,7 +8640,7 @@ TEST_F(WebRtcVideoChannelTest,
rtp_packet.SetSsrc(kIncomingUnsignalledSsrc);
ReceivePacketAndAdvanceTime(rtp_packet.Buffer(), /* packet_time_us */ -1);
// The |ssrc| member should still be unset.
// The `ssrc` member should still be unset.
rtp_parameters = channel_->GetDefaultRtpReceiveParameters();
ASSERT_EQ(1u, rtp_parameters.encodings.size());
EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);


@ -171,8 +171,8 @@ int MinPositive(int a, int b) {
return std::min(a, b);
}
// |max_send_bitrate_bps| is the bitrate from "b=" in SDP.
// |rtp_max_bitrate_bps| is the bitrate from RtpSender::SetParameters.
// `max_send_bitrate_bps` is the bitrate from "b=" in SDP.
// `rtp_max_bitrate_bps` is the bitrate from RtpSender::SetParameters.
absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
absl::optional<int> rtp_max_bitrate_bps,
const webrtc::AudioCodecSpec& spec) {
@ -186,8 +186,8 @@ absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
}
if (bps < spec.info.min_bitrate_bps) {
// If codec is not multi-rate and |bps| is less than the fixed bitrate then
// fail. If codec is not multi-rate and |bps| equals or exceeds the fixed
// If codec is not multi-rate and `bps` is less than the fixed bitrate then
// fail. If codec is not multi-rate and `bps` equals or exceeds the fixed
// bitrate then ignore.
RTC_LOG(LS_ERROR) << "Failed to set codec " << spec.format.name
<< " to bitrate " << bps
@ -1003,7 +1003,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
number_of_frames, sample_rate, audio_frame->speech_type_,
audio_frame->vad_activity_, number_of_channels);
// TODO(bugs.webrtc.org/10739): add dcheck that
// |absolute_capture_timestamp_ms| always receives a value.
// `absolute_capture_timestamp_ms` always receives a value.
if (absolute_capture_timestamp_ms) {
audio_frame->set_absolute_capture_timestamp_ms(
*absolute_capture_timestamp_ms);
@ -1011,11 +1011,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
stream_->SendAudioData(std::move(audio_frame));
}
// Callback from the |source_| when it is going away. In case Start() has
// Callback from the `source_` when it is going away. In case Start() has
// never been called, this callback won't be triggered.
void OnClose() override {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
// Set |source_| to nullptr to make sure no more callback will get into
// Set `source_` to nullptr to make sure no more callback will get into
// the source.
source_ = nullptr;
UpdateSendState();
@ -1498,8 +1498,8 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters(
// |WebRtcAudioSendStream::SetRtpParameters()| which is called at the end,
// though there are two differences:
// 1. |WebRtcVoiceMediaChannel::SetChannelSendParameters()| only calls
// |SetSendCodec| while |WebRtcAudioSendStream::SetRtpParameters()| calls
// |SetSendCodecs|. The outcome should be the same.
// `SetSendCodec` while |WebRtcAudioSendStream::SetRtpParameters()| calls
// `SetSendCodecs`. The outcome should be the same.
// 2. AudioSendStream can be recreated.
// Codecs are handled at the WebRtcVoiceMediaChannel level.
@ -1998,7 +1998,7 @@ void WebRtcVoiceMediaChannel::ResetUnsignaledRecvStream() {
RTC_DCHECK_RUN_ON(worker_thread_);
RTC_LOG(LS_INFO) << "ResetUnsignaledRecvStream.";
unsignaled_stream_params_ = StreamParams();
// Create a copy since RemoveRecvStream will modify |unsignaled_recv_ssrcs_|.
// Create a copy since RemoveRecvStream will modify `unsignaled_recv_ssrcs_`.
std::vector<uint32_t> to_remove = unsignaled_recv_ssrcs_;
for (uint32_t ssrc : to_remove) {
RemoveRecvStream(ssrc);

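A stripped-down sketch of the fixed-bitrate branch that the ComputeSendBitrate() comments earlier in this file's diff describe: when the codec is not multi-rate, a requested bitrate below the codec's fixed rate fails (empty result), while a request at or above it is ignored in favour of the fixed rate. Types are deliberately simplified to plain ints.

#include "absl/types/optional.h"

absl::optional<int> FixedRateBitrateSketch(int requested_bps,
                                           int codec_fixed_bitrate_bps) {
  if (requested_bps < codec_fixed_bitrate_bps) {
    // "less than the fixed bitrate then fail"
    return absl::nullopt;
  }
  // "equals or exceeds the fixed bitrate then ignore": keep the codec's rate.
  return codec_fixed_bitrate_bps;
}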

@ -395,10 +395,10 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
}
// Test that send bandwidth is set correctly.
// |codec| is the codec under test.
// |max_bitrate| is a parameter to set to SetMaxSendBandwidth().
// |expected_result| is the expected result from SetMaxSendBandwidth().
// |expected_bitrate| is the expected audio bitrate afterward.
// `codec` is the codec under test.
// `max_bitrate` is a parameter to set to SetMaxSendBandwidth().
// `expected_result` is the expected result from SetMaxSendBandwidth().
// `expected_bitrate` is the expected audio bitrate afterward.
void TestMaxSendBandwidth(const cricket::AudioCodec& codec,
int max_bitrate,
bool expected_result,
@ -1470,7 +1470,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) {
// Receive PCMU packet (SSRC=1).
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
// The |ssrc| member should still be unset.
// The `ssrc` member should still be unset.
rtp_parameters = channel_->GetDefaultRtpReceiveParameters();
ASSERT_EQ(1u, rtp_parameters.encodings.size());
EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);
@ -3611,11 +3611,11 @@ TEST_P(WebRtcVoiceEngineTestFake, PreservePlayoutWhenRecreateRecvStream) {
// Tests when GetSources is called with non-existing ssrc, it will return an
// empty list of RtpSource without crashing.
TEST_P(WebRtcVoiceEngineTestFake, GetSourcesWithNonExistingSsrc) {
// Set up a recv stream with |kSsrcX|.
// Set up a recv stream with `kSsrcX`.
SetupRecvStream();
cricket::WebRtcVoiceMediaChannel* media_channel =
static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
// Call GetSources with |kSsrcY| which doesn't exist.
// Call GetSources with `kSsrcY` which doesn't exist.
std::vector<webrtc::RtpSource> sources = media_channel->GetSources(kSsrcY);
EXPECT_EQ(0u, sources.size());
}


@ -86,11 +86,11 @@ class SctpTransportInternal {
// completes. This method can be called multiple times, though not if either
// of the ports is changed.
//
// |local_sctp_port| and |remote_sctp_port| are passed along the wire and the
// `local_sctp_port` and `remote_sctp_port` are passed along the wire and the
// listener and connector must be using the same port. They are not related
// to the ports at the IP level. If set to -1, we default to
// kSctpDefaultPort.
// |max_message_size_| sets the max message size on the connection.
// `max_message_size_` sets the max message size on the connection.
// It must be smaller than or equal to kSctpSendBufferSize.
// It can be changed by a second Start() call.
//
@ -104,10 +104,10 @@ class SctpTransportInternal {
// NOTE: Initially there was a "Stop" method here, but it was never used, so
// it was removed.
// Informs SctpTransport that |sid| will start being used. Returns false if
// it is impossible to use |sid|, or if it's already in use.
// Until calling this, can't send data using |sid|.
// TODO(deadbeef): Actually implement the "returns false if |sid| can't be
// Informs SctpTransport that `sid` will start being used. Returns false if
// it is impossible to use `sid`, or if it's already in use.
// Until calling this, can't send data using `sid`.
// TODO(deadbeef): Actually implement the "returns false if `sid` can't be
// used" part. See:
// https://bugs.chromium.org/p/chromium/issues/detail?id=619849
virtual bool OpenStream(int sid) = 0;
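A hedged usage sketch of the lifecycle described above, assuming a Start(local_sctp_port, remote_sctp_port, max_message_size) signature returning bool (only the parameter names appear in the comments): start with the default SCTP port on both ends and a 64 KiB message cap, then claim stream id 1 before sending on it.

// Illustrative fragment against the interface shown above.
bool StartAndOpenStream(cricket::SctpTransportInternal* transport) {
  // -1 selects kSctpDefaultPort on both sides, per the comment above; the
  // message cap must not exceed kSctpSendBufferSize.
  if (!transport->Start(/*local_sctp_port=*/-1, /*remote_sctp_port=*/-1,
                        /*max_message_size=*/64 * 1024)) {
    return false;
  }
  // False means stream id 1 is unusable or already in use.
  return transport->OpenStream(/*sid=*/1);
}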


@ -304,7 +304,7 @@ class UsrsctpTransportMap {
return map_.erase(id) > 0;
}
// Posts |action| to the network thread of the transport identified by |id|
// Posts `action` to the network thread of the transport identified by `id`
// and returns true if found, all while holding a lock to protect against the
// transport being simultaneously deleted/deregistered, or returns false if
// not found.


@ -68,10 +68,10 @@ struct SctpInboundPacket;
class UsrsctpTransport : public SctpTransportInternal,
public sigslot::has_slots<> {
public:
// |network_thread| is where packets will be processed and callbacks from
// `network_thread` is where packets will be processed and callbacks from
// this transport will be posted, and is the only thread on which public
// methods can be called.
// |transport| is not required (can be null).
// `transport` is not required (can be null).
UsrsctpTransport(rtc::Thread* network_thread,
rtc::PacketTransportInternal* transport);
~UsrsctpTransport() override;
@ -163,7 +163,7 @@ class UsrsctpTransport : public SctpTransportInternal,
// buffered message was accepted by the sctp lib.
bool SendBufferedMessage();
// Tries to send the |payload| on the usrsctp lib. The message will be
// Tries to send the `payload` on the usrsctp lib. The message will be
// advanced by the amount that was sent.
SendDataResult SendMessageInternal(OutgoingMessage* message);
@ -180,7 +180,7 @@ class UsrsctpTransport : public SctpTransportInternal,
void OnSendThresholdCallback();
sockaddr_conn GetSctpSockAddr(int port);
// Called using |invoker_| to send packet on the network.
// Called using `invoker_` to send packet on the network.
void OnPacketFromSctpToNetwork(const rtc::CopyOnWriteBuffer& buffer);
// Called on the network thread.
@ -189,10 +189,10 @@ class UsrsctpTransport : public SctpTransportInternal,
size_t length,
struct sctp_rcvinfo rcv,
int flags);
// Called using |invoker_| to decide what to do with the data.
// Called using `invoker_` to decide what to do with the data.
void OnDataFromSctpToTransport(const ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer);
// Called using |invoker_| to decide what to do with the notification.
// Called using `invoker_` to decide what to do with the notification.
void OnNotificationFromSctp(const rtc::CopyOnWriteBuffer& buffer);
void OnNotificationAssocChange(const sctp_assoc_change& change);
@ -226,7 +226,7 @@ class UsrsctpTransport : public SctpTransportInternal,
// Has Start been called? Don't create SCTP socket until it has.
bool started_ = false;
// Are we ready to queue data (SCTP socket created, and not blocked due to
// congestion control)? Different than |transport_|'s "ready to send".
// congestion control)? Different than `transport_`'s "ready to send".
bool ready_to_send_data_ = false;
// Used to keep track of the status of each stream (or rather, each pair of
@ -268,7 +268,7 @@ class UsrsctpTransport : public SctpTransportInternal,
}
};
// Entries should only be removed from this map if |reset_complete| is
// Entries should only be removed from this map if `reset_complete` is
// true.
std::map<uint32_t, StreamStatus> stream_status_by_sid_;