Format the rest of /sdk folder

I already submitted separate CLs for /objc; this CL formats the
rest of the folder.

Formatting done via:

git ls-files | grep -E '^sdk\/.*\.(h|cc|mm)' | grep -Ev '^sdk\/objc.*' | xargs clang-format -i
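
To verify the result without rewriting files, the same pipeline can run
clang-format in dry-run mode (a hypothetical verification step, not part
of this CL; --dry-run and -Werror require clang-format 10 or newer):

git ls-files | grep -E '^sdk\/.*\.(h|cc|mm)' | grep -Ev '^sdk\/objc.*' | xargs clang-format --dry-run -Werror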

Bug: webrtc:42225392
Change-Id: Ib2c2bf1d882c4e916a5787c2205818e2e4dcc22d
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373902
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43684}
Author: Boris Tsirkin, 2025-01-08 05:58:03 -08:00 (committed by WebRTC LUCI CQ)
Parent: 6b6ebf3689
Commit: d4937d3336
9 changed files with 20 additions and 23 deletions

@@ -217,16 +217,16 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
       // Java audio for input and OpenSL ES for output audio (i.e. mixed APIs).
       // This combination provides low-latency output audio and at the same
       // time support for HW AEC using the AudioRecord Java API.
-      return CreateJavaInputAndOpenSLESOutputAudioDeviceModule(
-          env, j_context.obj());
+      return CreateJavaInputAndOpenSLESOutputAudioDeviceModule(env,
+                                                               j_context.obj());
 #if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
     case AudioDeviceModule::kAndroidAAudioAudio:
       // AAudio based audio for both input and output.
       return CreateAAudioAudioDeviceModule(env, j_context.obj());
     case AudioDeviceModule::kAndroidJavaInputAndAAudioOutputAudio:
       // Java audio for input and AAudio for output audio (i.e. mixed APIs).
-      return CreateJavaInputAndAAudioOutputAudioDeviceModule(
-          env, j_context.obj());
+      return CreateJavaInputAndAAudioOutputAudioDeviceModule(env,
+                                                             j_context.obj());
 #endif
     default:
       return nullptr;

@@ -32,14 +32,12 @@ rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
     jobject application_context);

 rtc::scoped_refptr<AudioDeviceModule>
-CreateJavaInputAndOpenSLESOutputAudioDeviceModule(
-    JNIEnv* env,
-    jobject application_context);
+CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
+                                                  jobject application_context);

 rtc::scoped_refptr<AudioDeviceModule>
-CreateJavaInputAndAAudioOutputAudioDeviceModule(
-    JNIEnv* env,
-    jobject application_context);
+CreateJavaInputAndAAudioOutputAudioDeviceModule(JNIEnv* env,
+                                                jobject application_context);

 rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
     AudioDeviceModule::AudioLayer audio_layer);

@@ -335,8 +335,7 @@ ScopedJavaLocalRef<jobjectArray> NativeToJavaStringArray(
 }

 JavaListBuilder::JavaListBuilder(JNIEnv* env)
-    : env_(env), j_list_(JNI_ArrayList::Java_ArrayList_Constructor(env)) {
-}
+    : env_(env), j_list_(JNI_ArrayList::Java_ArrayList_Constructor(env)) {}

 JavaListBuilder::~JavaListBuilder() = default;

@@ -346,8 +345,7 @@ void JavaListBuilder::add(const jni_zero::JavaRef<jobject>& element) {

 JavaMapBuilder::JavaMapBuilder(JNIEnv* env)
     : env_(env),
-      j_map_(JNI_LinkedHashMap::Java_LinkedHashMap_Constructor(env)) {
-}
+      j_map_(JNI_LinkedHashMap::Java_LinkedHashMap_Constructor(env)) {}

 JavaMapBuilder::~JavaMapBuilder() = default;

@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
-
 #ifndef SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_
 #define SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_

@@ -424,8 +424,7 @@ void AndroidNetworkMonitor::OnNetworkConnected_n(
   if (iter != network_info_by_handle_.end()) {
     // Remove old if_name for this handle if they don't match.
     if (network_info.interface_name != iter->second.interface_name) {
-      RTC_LOG(LS_INFO) << "Network"
-                       << " handle " << network_info.handle
+      RTC_LOG(LS_INFO) << "Network" << " handle " << network_info.handle
                        << " change if_name from: "
                        << iter->second.interface_name
                        << " to: " << network_info.interface_name;

@@ -639,11 +639,13 @@ void GetAudioParameters(JNIEnv* env,
   RTC_CHECK(output_parameters->is_valid());
 }

-bool IsLowLatencyInputSupported(JNIEnv* env, const JavaRef<jobject>& j_context) {
+bool IsLowLatencyInputSupported(JNIEnv* env,
+                                const JavaRef<jobject>& j_context) {
   return Java_WebRtcAudioManager_isLowLatencyInputSupported(env, j_context);
 }

-bool IsLowLatencyOutputSupported(JNIEnv* env, const JavaRef<jobject>& j_context) {
+bool IsLowLatencyOutputSupported(JNIEnv* env,
+                                 const JavaRef<jobject>& j_context) {
   return Java_WebRtcAudioManager_isLowLatencyOutputSupported(env, j_context);
 }

@@ -91,7 +91,8 @@ void GetAudioParameters(JNIEnv* env,
 bool IsLowLatencyInputSupported(JNIEnv* env, const JavaRef<jobject>& j_context);
-bool IsLowLatencyOutputSupported(JNIEnv* env, const JavaRef<jobject>& j_context);
+bool IsLowLatencyOutputSupported(JNIEnv* env,
+                                 const JavaRef<jobject>& j_context);

 // Glue together an audio input and audio output to get an AudioDeviceModule.
 rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(

@@ -285,7 +285,7 @@ void JavaToNativeRTCConfiguration(
   }

   rtc_config->port_allocator_config.flags =
-     Java_RTCConfiguration_getPortAllocatorFlags(jni, j_rtc_config);
+      Java_RTCConfiguration_getPortAllocatorFlags(jni, j_rtc_config);
 }

@@ -89,8 +89,8 @@ class MediaConstraints {
   static const char kEnableIPv6[];  // googIPv6
   // Temporary constraint to enable suspend below min bitrate feature.
   static const char kEnableVideoSuspendBelowMinBitrate[];
-  static const char kScreencastMinBitrate[];     // googScreencastMinBitrate
-  static const char kCpuOveruseDetection[];      // googCpuOveruseDetection
+  static const char kScreencastMinBitrate[];  // googScreencastMinBitrate
+  static const char kCpuOveruseDetection[];   // googCpuOveruseDetection
   // Constraint to enable negotiating raw RTP packetization using attribute
   // "a=packetization:<payload_type> raw" in the SDP for all video payload.