Format /video folder

Formatting done via:

git ls-files | grep -E '^video\/.*\.(h|cc|mm)' | xargs clang-format -i

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: Ie7a700221e73a07fab702f84afc3871ddef269f3
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373904
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43686}
This commit is contained in:
Boris Tsirkin 2025-01-08 06:02:48 -08:00 committed by WebRTC LUCI CQ
parent 256d828aac
commit 59b1d32c35
4 changed files with 58 additions and 60 deletions

View File

@@ -96,8 +96,8 @@ class FrameDumpingEncoder : public VideoEncoder, public EncodedImageCallback {
RTC_EXCLUSIVE_LOCKS_REQUIRED(mu_) {
char filename_buffer[1024];
rtc::SimpleStringBuilder builder(filename_buffer);
builder << output_directory_ << "/webrtc_encoded_frames"
<< "." << origin_time_micros_ << "." << index << ".ivf";
builder << output_directory_ << "/webrtc_encoded_frames" << "."
<< origin_time_micros_ << "." << index << ".ivf";
return builder.str();
}

View File

@@ -247,31 +247,31 @@ void ReceiveStatisticsProxy::UpdateHistograms(
if (e2e_delay_ms) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".EndToEndDelayInMs",
*e2e_delay_ms);
log_stream << uma_prefix << ".EndToEndDelayInMs"
<< " " << *e2e_delay_ms << '\n';
log_stream << uma_prefix << ".EndToEndDelayInMs" << " " << *e2e_delay_ms
<< '\n';
}
std::optional<int> e2e_delay_max_ms = stats.e2e_delay_counter.Max();
if (e2e_delay_max_ms && e2e_delay_ms) {
RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".EndToEndDelayMaxInMs",
*e2e_delay_max_ms);
log_stream << uma_prefix << ".EndToEndDelayMaxInMs"
<< " " << *e2e_delay_max_ms << '\n';
log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << " "
<< *e2e_delay_max_ms << '\n';
}
std::optional<int> interframe_delay_ms =
stats.interframe_delay_counter.Avg(kMinRequiredSamples);
if (interframe_delay_ms) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayInMs",
*interframe_delay_ms);
log_stream << uma_prefix << ".InterframeDelayInMs"
<< " " << *interframe_delay_ms << '\n';
log_stream << uma_prefix << ".InterframeDelayInMs" << " "
<< *interframe_delay_ms << '\n';
}
std::optional<int> interframe_delay_max_ms =
stats.interframe_delay_counter.Max();
if (interframe_delay_max_ms && interframe_delay_ms) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayMaxInMs",
*interframe_delay_max_ms);
log_stream << uma_prefix << ".InterframeDelayMaxInMs"
<< " " << *interframe_delay_max_ms << '\n';
log_stream << uma_prefix << ".InterframeDelayMaxInMs" << " "
<< *interframe_delay_max_ms << '\n';
}
std::optional<uint32_t> interframe_delay_95p_ms =
@@ -280,24 +280,24 @@ void ReceiveStatisticsProxy::UpdateHistograms(
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".InterframeDelay95PercentileInMs",
*interframe_delay_95p_ms);
log_stream << uma_prefix << ".InterframeDelay95PercentileInMs"
<< " " << *interframe_delay_95p_ms << '\n';
log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" << " "
<< *interframe_delay_95p_ms << '\n';
}
std::optional<int> width = stats.received_width.Avg(kMinRequiredSamples);
if (width) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedWidthInPixels",
*width);
log_stream << uma_prefix << ".ReceivedWidthInPixels"
<< " " << *width << '\n';
log_stream << uma_prefix << ".ReceivedWidthInPixels" << " " << *width
<< '\n';
}
std::optional<int> height = stats.received_height.Avg(kMinRequiredSamples);
if (height) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedHeightInPixels",
*height);
log_stream << uma_prefix << ".ReceivedHeightInPixels"
<< " " << *height << '\n';
log_stream << uma_prefix << ".ReceivedHeightInPixels" << " " << *height
<< '\n';
}
std::optional<double> corruption_score = stats.corruption_score.GetMean();
@@ -320,8 +320,8 @@ void ReceiveStatisticsProxy::UpdateHistograms(
flow_duration_sec / 1000);
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".MediaBitrateReceivedInKbps", media_bitrate_kbps);
log_stream << uma_prefix << ".MediaBitrateReceivedInKbps"
<< " " << media_bitrate_kbps << '\n';
log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << " "
<< media_bitrate_kbps << '\n';
}
int num_total_frames =
@@ -332,15 +332,14 @@ void ReceiveStatisticsProxy::UpdateHistograms(
(num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
RTC_HISTOGRAM_COUNTS_SPARSE_1000(
uma_prefix + ".KeyFramesReceivedInPermille", key_frames_permille);
log_stream << uma_prefix << ".KeyFramesReceivedInPermille"
<< " " << key_frames_permille << '\n';
log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << " "
<< key_frames_permille << '\n';
}
std::optional<int> qp = stats.qp_counter.Avg(kMinRequiredSamples);
if (qp) {
RTC_HISTOGRAM_COUNTS_SPARSE_200(uma_prefix + ".Decoded.Vp8.Qp", *qp);
log_stream << uma_prefix << ".Decoded.Vp8.Qp"
<< " " << *qp << '\n';
log_stream << uma_prefix << ".Decoded.Vp8.Qp" << " " << *qp << '\n';
}
}
}

View File

@@ -1444,8 +1444,7 @@ void VideoStreamEncoder::ReconfigureEncoder() {
if (!encoder_initialized_) {
RTC_LOG(LS_WARNING) << "Failed to initialize "
<< CodecTypeToPayloadString(codec.codecType)
<< " encoder."
<< "switch_encoder_on_init_failures: "
<< " encoder." << "switch_encoder_on_init_failures: "
<< switch_encoder_on_init_failures_;
if (switch_encoder_on_init_failures_) {
@@ -2158,10 +2157,10 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
// need to update on quality convergence.
unsigned int image_width = image_copy._encodedWidth;
unsigned int image_height = image_copy._encodedHeight;
encoder_queue_->PostTask([this, codec_type, image_width, image_height,
simulcast_index, qp = image_copy.qp_,
is_steady_state_refresh_frame =
image_copy.IsSteadyStateRefreshFrame()] {
encoder_queue_->PostTask(
[this, codec_type, image_width, image_height, simulcast_index,
qp = image_copy.qp_,
is_steady_state_refresh_frame = image_copy.IsSteadyStateRefreshFrame()] {
RTC_DCHECK_RUN_ON(encoder_queue_.get());
// Check if the encoded image has reached target quality.
@@ -2171,13 +2170,13 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
// Let the frame cadence adapter know about quality convergence.
if (frame_cadence_adapter_)
frame_cadence_adapter_->UpdateLayerQualityConvergence(simulcast_index,
at_target_quality);
frame_cadence_adapter_->UpdateLayerQualityConvergence(
simulcast_index, at_target_quality);
// Currently, the internal quality scaler is used for VP9 instead of the
// webrtc qp scaler (in the no-svc case or if only a single spatial layer is
// encoded). It has to be explicitly detected and reported to adaptation
// metrics.
// webrtc qp scaler (in the no-svc case or if only a single spatial
// layer is encoded). It has to be explicitly detected and reported to
// adaptation metrics.
if (codec_type == VideoCodecType::kVideoCodecVP9 &&
send_codec_.VP9()->automaticResizeOn) {
unsigned int expected_width = send_codec_.width;