Format /sdk/objc/components folder

There are a lot of changes in /sdk, so I'm splitting it up

Formatting done via:

git ls-files | grep -E '^sdk\/objc\/components.*\.(h|cc|mm)' | xargs clang-format -i

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: I18632b265b2279e804b2f86abfc66601d12bb2ed
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373901
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43681}
Boris Tsirkin 2025-01-08 05:48:10 -08:00 committed by WebRTC LUCI CQ
parent 3e8e4784ac
commit 725f931f2f
33 changed files with 984 additions and 602 deletions

View File

@ -37,7 +37,8 @@ typedef OSStatus (^RTC_OBJC_TYPE(RTCAudioDeviceDeliverRecordedDataBlock))(
UInt32 frameCount,
const AudioBufferList *_Nullable inputData,
void *_Nullable renderContext,
NS_NOESCAPE RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock);
NS_NOESCAPE RTC_OBJC_TYPE(
RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock);
/**
* Delegate object provided by native ADM during RTCAudioDevice initialization.
@ -47,18 +48,21 @@ typedef OSStatus (^RTC_OBJC_TYPE(RTCAudioDeviceDeliverRecordedDataBlock))(
RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
(RTCAudioDeviceDelegate)<NSObject>
/**
* Implementation of RTCAudioSource should call this block to feed recorded PCM (16-bit integer)
* into native ADM. Stereo data is expected to be interleaved starting with the left channel.
* Either `inputData` with pre-filled audio data must be provided during block
* call or `renderBlock` must be provided which must fill provided audio buffer with recorded
* Implementation of RTCAudioSource should call this block to feed recorded
* PCM (16-bit integer) into native ADM. Stereo data is expected to be
* interleaved starting with the left channel. Either `inputData` with
* pre-filled audio data must be provided during block call or `renderBlock`
* must be provided which must fill provided audio buffer with recorded
* samples.
*
* NOTE: Implementation of RTCAudioDevice is expected to call the block on the same thread until
* `notifyAudioInterrupted` is called. When `notifyAudioInterrupted` is called implementation
* can call the block on a different thread.
* NOTE: Implementation of RTCAudioDevice is expected to call the block on
* the same thread until `notifyAudioInterrupted` is called. When
* `notifyAudioInterrupted` is called implementation can call the block on a
* different thread.
*/
@property(readonly, nonnull)
RTC_OBJC_TYPE(RTCAudioDeviceDeliverRecordedDataBlock) deliverRecordedData;
RTC_OBJC_TYPE(RTCAudioDeviceDeliverRecordedDataBlock)
deliverRecordedData;
/**
 * Provides input sample rate preference as it is preferred by native ADM.
@ -81,30 +85,37 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
@property(readonly) NSTimeInterval preferredOutputIOBufferDuration;
/**
* Implementation of RTCAudioDevice should call this block to request PCM (16-bit integer)
* from native ADM to play. Stereo data is interleaved starting with the left channel.
* Implementation of RTCAudioDevice should call this block to request PCM
* (16-bit integer) from native ADM to play. Stereo data is interleaved starting
* with the left channel.
*
 * NOTE: Implementation of RTCAudioDevice is expected to invoke this block on the
* same thread until `notifyAudioInterrupted` is called. When `notifyAudioInterrupted` is called
* implementation can call the block from a different thread.
 * NOTE: Implementation of RTCAudioDevice is expected to invoke this block on
* the same thread until `notifyAudioInterrupted` is called. When
* `notifyAudioInterrupted` is called implementation can call the block from a
* different thread.
*/
@property(readonly, nonnull) RTC_OBJC_TYPE(RTCAudioDeviceGetPlayoutDataBlock) getPlayoutData;
@property(readonly, nonnull) RTC_OBJC_TYPE(RTCAudioDeviceGetPlayoutDataBlock)
getPlayoutData;
/**
* Notifies native ADM that some of the audio input parameters of RTCAudioDevice like
 * sample rate and/or IO buffer duration and/or IO latency had possibly changed.
* Native ADM will adjust its audio input buffer to match current parameters of audio device.
* Notifies native ADM that some of the audio input parameters of RTCAudioDevice
 * like sample rate and/or IO buffer duration and/or IO latency had possibly
* changed. Native ADM will adjust its audio input buffer to match current
* parameters of audio device.
*
* NOTE: Must be called within block executed via `dispatchAsync` or `dispatchSync`.
* NOTE: Must be called within block executed via `dispatchAsync` or
* `dispatchSync`.
*/
- (void)notifyAudioInputParametersChange;
/**
* Notifies native ADM that some of the audio output parameters of RTCAudioDevice like
 * sample rate and/or IO buffer duration and/or IO latency had possibly changed.
* Native ADM will adjust its audio output buffer to match current parameters of audio device.
* Notifies native ADM that some of the audio output parameters of
 * RTCAudioDevice like sample rate and/or IO buffer duration and/or IO latency
* had possibly changed. Native ADM will adjust its audio output buffer to match
* current parameters of audio device.
*
* NOTE: Must be called within block executed via `dispatchAsync` or `dispatchSync`.
* NOTE: Must be called within block executed via `dispatchAsync` or
* `dispatchSync`.
*/
- (void)notifyAudioOutputParametersChange;
@ -112,15 +123,17 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
* Notifies native ADM that audio input is interrupted and further audio playout
* and recording might happen on a different thread.
*
* NOTE: Must be called within block executed via `dispatchAsync` or `dispatchSync`.
* NOTE: Must be called within block executed via `dispatchAsync` or
* `dispatchSync`.
*/
- (void)notifyAudioInputInterrupted;
/**
* Notifies native ADM that audio output is interrupted and further audio playout
* and recording might happen on a different thread.
* Notifies native ADM that audio output is interrupted and further audio
* playout and recording might happen on a different thread.
*
* NOTE: Must be called within block executed via `dispatchAsync` or `dispatchSync`.
* NOTE: Must be called within block executed via `dispatchAsync` or
* `dispatchSync`.
*/
- (void)notifyAudioOutputInterrupted;
@ -133,8 +146,8 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
* `notifyAudioOutputInterrupted` on native ADM thread.
* Also could be used by `RTCAudioDevice` implementation to tie
* mutations of underlying audio objects (AVAudioEngine, AudioUnit, etc)
* to the native ADM thread. Could be useful to handle events like audio route change, which
* could lead to audio parameters change.
* to the native ADM thread. Could be useful to handle events like audio route
* change, which could lead to audio parameters change.
*/
- (void)dispatchAsync:(dispatch_block_t)block;
@ -146,42 +159,48 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
* `notifyAudioOutputParametersChange`, `notifyAudioInputInterrupted`,
* `notifyAudioOutputInterrupted` on native ADM thread and make sure
* aforementioned is completed before `dispatchSync` returns. Could be useful
* when implementation of `RTCAudioDevice` tie mutation to underlying audio objects (AVAudioEngine,
* AudioUnit, etc) to own thread to satisfy requirement that native ADM audio parameters
* must be kept in sync with current audio parameters before audio is actually played or recorded.
* when implementation of `RTCAudioDevice` tie mutation to underlying audio
* objects (AVAudioEngine, AudioUnit, etc) to own thread to satisfy requirement
* that native ADM audio parameters must be kept in sync with current audio
* parameters before audio is actually played or recorded.
*/
- (void)dispatchSync:(dispatch_block_t)block;
@end
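
To make the threading contract above concrete, here is a minimal sketch (not part of this commit) of how an RTCAudioDevice implementation might react to a route change; `_delegate` is assumed to be the object received in `initializeWithDelegate:`:

- (void)handleRouteChangeNotification:(NSNotification *)notification {
  id<RTC_OBJC_TYPE(RTCAudioDeviceDelegate)> delegate = _delegate;
  [delegate dispatchAsync:^{
    // Per the comments above, the notify* calls must happen inside a block
    // executed via `dispatchAsync` or `dispatchSync`.
    [delegate notifyAudioInputParametersChange];
    [delegate notifyAudioOutputParametersChange];
  }];
}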
/**
* Protocol to abstract platform specific ways to implement playback and recording.
* Protocol to abstract platform specific ways to implement playback and
* recording.
*
* NOTE: All the members of protocol are called by native ADM from the same thread
* between calls to `initializeWithDelegate` and `terminate`.
* NOTE: Implementation is fully responsible for configuring application's AVAudioSession.
* An example implementation of RTCAudioDevice: https://github.com/mstyura/RTCAudioDevice
* TODO(yura.yaroshevich): Implement custom RTCAudioDevice for AppRTCMobile demo app.
* NOTE: All the members of protocol are called by native ADM from the same
* thread between calls to `initializeWithDelegate` and `terminate`. NOTE:
* Implementation is fully responsible for configuring application's
* AVAudioSession. An example implementation of RTCAudioDevice:
* https://github.com/mstyura/RTCAudioDevice
* TODO(yura.yaroshevich): Implement custom RTCAudioDevice for AppRTCMobile demo
* app.
*/
RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
(RTCAudioDevice)<NSObject>
/**
* Indicates current sample rate of audio recording. Changes to this property
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
* Indicates current sample rate of audio recording. Changes to this
* property must be notified back to native ADM via
* `-[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
*/
@property(readonly) double deviceInputSampleRate;
/**
* Indicates current size of record buffer. Changes to this property
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
*/
@property(readonly) NSTimeInterval inputIOBufferDuration;
/**
* Indicates current number of recorded audio channels. Changes to this property
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
*/
@property(readonly) NSInteger inputNumberOfChannels;
@ -192,19 +211,22 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
/**
* Indicates current sample rate of audio playback. Changes to this property
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
*/
@property(readonly) double deviceOutputSampleRate;
/**
* Indicates current size of playback buffer. Changes to this property
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
* must be notified back to native ADM via `-[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
*/
@property(readonly) NSTimeInterval outputIOBufferDuration;
/**
* Indicates current number of playback audio channels. Changes to this property
* must be notified back to WebRTC via `[RTCAudioDeviceDelegate notifyAudioParametersChange]`.
* must be notified back to WebRTC via `[RTCAudioDeviceDelegate
* notifyAudioParametersChange]`.
*/
@property(readonly) NSInteger outputNumberOfChannels;
@ -214,36 +236,40 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
@property(readonly) NSTimeInterval outputLatency;
/**
* Indicates if invocation of `initializeWithDelegate` required before usage of RTCAudioDevice.
* YES indicates that `initializeWithDelegate` was called earlier without subsequent call to
* `terminate`. NO indicates that either `initializeWithDelegate` not called or `terminate` called.
* Indicates if invocation of `initializeWithDelegate` required before usage of
* RTCAudioDevice. YES indicates that `initializeWithDelegate` was called
* earlier without subsequent call to `terminate`. NO indicates that either
* `initializeWithDelegate` not called or `terminate` called.
*/
@property(readonly) BOOL isInitialized;
/**
* Initializes RTCAudioDevice with RTCAudioDeviceDelegate.
* Implementation must return YES if RTCAudioDevice initialized successfully and NO otherwise.
* Implementation must return YES if RTCAudioDevice initialized successfully and
* NO otherwise.
*/
- (BOOL)initializeWithDelegate:(id<RTC_OBJC_TYPE(RTCAudioDeviceDelegate)>)delegate;
- (BOOL)initializeWithDelegate:
(id<RTC_OBJC_TYPE(RTCAudioDeviceDelegate)>)delegate;
/**
* De-initializes RTCAudioDevice. Implementation should forget about `delegate` provided in
* `initializeWithDelegate`.
* De-initializes RTCAudioDevice. Implementation should forget about `delegate`
* provided in `initializeWithDelegate`.
*/
- (BOOL)terminateDevice;
/**
* Property to indicate if `initializePlayout` call required before invocation of `startPlayout`.
* YES indicates that `initializePlayout` was successfully invoked earlier or not necessary,
* NO indicates that `initializePlayout` invocation required.
* Property to indicate if `initializePlayout` call required before invocation
* of `startPlayout`. YES indicates that `initializePlayout` was successfully
* invoked earlier or not necessary, NO indicates that `initializePlayout`
* invocation required.
*/
@property(readonly) BOOL isPlayoutInitialized;
/**
* Prepares RTCAudioDevice to play audio.
* Called by native ADM before invocation of `startPlayout`.
* Implementation is expected to return YES in case of successful playout initialization and NO
* otherwise.
* Implementation is expected to return YES in case of successful playout
* initialization and NO otherwise.
*/
- (BOOL)initializePlayout;
@ -268,18 +294,19 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
- (BOOL)stopPlayout;
/**
* Property to indicate if `initializeRecording` call required before usage of `startRecording`.
* YES indicates that `initializeRecording` was successfully invoked earlier or not necessary,
* NO indicates that `initializeRecording` invocation required.
* Property to indicate if `initializeRecording` call required before usage of
* `startRecording`. YES indicates that `initializeRecording` was successfully
* invoked earlier or not necessary, NO indicates that `initializeRecording`
* invocation required.
*/
@property(readonly) BOOL isRecordingInitialized;
/**
* Prepares RTCAudioDevice to record audio.
* Called by native ADM before invocation of `startRecording`.
* Implementation may use this method to prepare resources required to record audio.
* Implementation is expected to return YES in case of successful record initialization and NO
* otherwise.
* Implementation may use this method to prepare resources required to record
* audio. Implementation is expected to return YES in case of successful record
* initialization and NO otherwise.
*/
- (BOOL)initializeRecording;
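
For orientation (not from this change), a bare-bones conformer of the protocol could start from the sketch below. Only members visible in this header are used; the actual audio I/O plumbing (AVAudioEngine/AudioUnit render callbacks that invoke `deliverRecordedData` and `getPlayoutData`) is elided.

// Hypothetical skeleton of an RTCAudioDevice implementation.
@interface MyAudioDevice : NSObject <RTC_OBJC_TYPE(RTCAudioDevice)>
@end

@implementation MyAudioDevice {
  id<RTC_OBJC_TYPE(RTCAudioDeviceDelegate)> _delegate;
  BOOL _initialized;
}

- (BOOL)isInitialized {
  return _initialized;
}

- (BOOL)initializeWithDelegate:
    (id<RTC_OBJC_TYPE(RTCAudioDeviceDelegate)>)delegate {
  _delegate = delegate;  // keep until terminateDevice
  _initialized = YES;
  return YES;
}

- (BOOL)terminateDevice {
  _delegate = nil;  // header: forget the delegate on de-initialization
  _initialized = NO;
  return YES;
}

// Remaining members (playout/recording initialization and start/stop,
// sample-rate, buffer-duration and channel-count properties) omitted.
@end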

View File

@ -16,7 +16,8 @@
@implementation RTC_OBJC_TYPE (RTCAudioSession)
(Configuration)
- (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
- (BOOL)setConfiguration
: (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
: (NSError **)outError {
return [self setConfiguration:configuration
active:NO
@ -24,7 +25,8 @@
error:outError];
}
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
- (BOOL)setConfiguration:
(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
error:(NSError **)outError {
return [self setConfiguration:configuration
@ -35,7 +37,8 @@
#pragma mark - Private
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
- (BOOL)setConfiguration:
(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
shouldSetActive:(BOOL)shouldSetActive
error:(NSError **)outError {
@ -49,17 +52,21 @@
// everything we can.
NSError *error = nil;
if (self.category != configuration.category || self.mode != configuration.mode ||
if (self.category != configuration.category ||
self.mode != configuration.mode ||
self.categoryOptions != configuration.categoryOptions) {
NSError *configuringError = nil;
if (![self setCategory:configuration.category
mode:configuration.mode
options:configuration.categoryOptions
error:&configuringError]) {
RTCLogError(@"Failed to set category and mode: %@", configuringError.localizedDescription);
RTCLogError(@"Failed to set category and mode: %@",
configuringError.localizedDescription);
error = configuringError;
} else {
RTCLog(@"Set category to: %@, mode: %@", configuration.category, configuration.mode);
RTCLog(@"Set category to: %@, mode: %@",
configuration.category,
configuration.mode);
}
}
@ -73,8 +80,7 @@
error = sampleRateError;
}
} else {
RTCLog(@"Set preferred sample rate to: %.2f",
configuration.sampleRate);
RTCLog(@"Set preferred sample rate to: %.2f", configuration.sampleRate);
}
}
@ -97,7 +103,8 @@
NSError *activeError = nil;
if (![self setActive:active error:&activeError]) {
RTCLogError(@"Failed to setActive to %d: %@",
active, activeError.localizedDescription);
active,
activeError.localizedDescription);
error = activeError;
}
}
@ -113,11 +120,11 @@
NSError *inputChannelsError = nil;
if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
error:&inputChannelsError]) {
RTCLogError(@"Failed to set preferred input number of channels: %@",
inputChannelsError.localizedDescription);
if (!self.ignoresPreferredAttributeConfigurationErrors) {
error = inputChannelsError;
}
RTCLogError(@"Failed to set preferred input number of channels: %@",
inputChannelsError.localizedDescription);
if (!self.ignoresPreferredAttributeConfigurationErrors) {
error = inputChannelsError;
}
} else {
RTCLog(@"Set input number of channels to: %ld",
(long)inputNumberOfChannels);
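
As a usage sketch for the category above: callers hold the configuration lock around `setConfiguration:active:error:`. Note `sharedInstance` and `unlockForConfiguration` are assumed from RTCAudioSession's public API rather than shown in this diff.

RTC_OBJC_TYPE(RTCAudioSession) *session =
    [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
    [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
NSError *error = nil;
[session lockForConfiguration];
BOOL ok = [session setConfiguration:config active:YES error:&error];
[session unlockForConfiguration];
if (!ok) {
  RTCLogError(@"setConfiguration failed: %@", error.localizedDescription);
}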

View File

@ -30,8 +30,8 @@ NS_ASSUME_NONNULL_BEGIN
/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
@property(readonly) BOOL canPlayOrRecord;
/** Tracks whether we have been sent an interruption event that hasn't been matched by either an
* interrupted end event or a foreground event.
/** Tracks whether we have been sent an interruption event that hasn't been
* matched by either an interrupted end event or a foreground event.
*/
@property(nonatomic, assign) BOOL isInterrupted;
@ -76,14 +76,17 @@ NS_ASSUME_NONNULL_BEGIN
/** Notifies the receiver that a playout glitch was detected. */
- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
/** Notifies the receiver that there was an error when starting an audio unit. */
/** Notifies the receiver that there was an error when starting an audio unit.
*/
- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error;
// Properties and methods for tests.
- (void)notifyDidBeginInterruption;
- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
- (void)notifyDidEndInterruptionWithShouldResumeSession:
(BOOL)shouldResumeSession;
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
previousRoute:
(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;

View File

@ -35,7 +35,8 @@ RTC_OBJC_EXPORT
/** Called on a system notification thread when AVAudioSession starts an
* interruption event.
*/
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
- (void)audioSessionDidBeginInterruption:
(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a system notification thread when AVAudioSession ends an
* interruption event.
@ -48,12 +49,14 @@ RTC_OBJC_EXPORT
*/
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
previousRoute:
(AVAudioSessionRouteDescription *)previousRoute;
/** Called on a system notification thread when AVAudioSession media server
* terminates.
*/
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
- (void)audioSessionMediaServerTerminated:
(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a system notification thread when AVAudioSession media server
* restarts.
@ -68,12 +71,14 @@ RTC_OBJC_EXPORT
/** Called on a WebRTC thread when the audio device is notified to begin
* playback or recording.
*/
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
- (void)audioSessionDidStartPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called on a WebRTC thread when the audio device is notified to stop
* playback or recording.
*/
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
- (void)audioSessionDidStopPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session;
/** Called when the AVAudioSession output volume value changes. */
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
@ -87,11 +92,13 @@ RTC_OBJC_EXPORT
/** Called when the audio session is about to change the active state.
*/
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
willSetActive:(BOOL)active;
/** Called after the audio session successfully changed the active state.
*/
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active;
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didSetActive:(BOOL)active;
/** Called after the audio session failed to change the active state.
*/
@ -105,8 +112,9 @@ RTC_OBJC_EXPORT
@end
/** This is a protocol used to inform RTCAudioSession when the audio session
* activation state has changed outside of RTCAudioSession. The current known use
* case of this is when CallKit activates the audio session for the application
* activation state has changed outside of RTCAudioSession. The current known
* use case of this is when CallKit activates the audio session for the
* application
*/
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
@ -173,10 +181,14 @@ RTC_OBJC_EXPORT
@property(readonly) float inputGain;
@property(readonly) BOOL inputGainSettable;
@property(readonly) BOOL inputAvailable;
@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *inputDataSources;
@property(readonly, nullable) AVAudioSessionDataSourceDescription *inputDataSource;
@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *outputDataSources;
@property(readonly, nullable) AVAudioSessionDataSourceDescription *outputDataSource;
@property(readonly, nullable)
NSArray<AVAudioSessionDataSourceDescription *> *inputDataSources;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *inputDataSource;
@property(readonly, nullable)
NSArray<AVAudioSessionDataSourceDescription *> *outputDataSources;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *outputDataSource;
@property(readonly) double sampleRate;
@property(readonly) double preferredSampleRate;
@property(readonly) NSInteger inputNumberOfChannels;
@ -188,10 +200,11 @@ RTC_OBJC_EXPORT
@property(readonly) NSTimeInterval preferredIOBufferDuration;
/**
When YES, calls to -setConfiguration:error: and -setConfiguration:active:error: ignore errors in
configuring the audio session's "preferred" attributes (e.g. preferredInputNumberOfChannels).
Typically, configurations to preferred attributes are optimizations, and ignoring this type of
configuration error allows code flow to continue along the happy path when these optimizations are
When YES, calls to -setConfiguration:error: and -setConfiguration:active:error:
ignore errors in configuring the audio session's "preferred" attributes (e.g.
preferredInputNumberOfChannels). Typically, configurations to preferred
attributes are optimizations, and ignoring this type of configuration error
allows code flow to continue along the happy path when these optimizations are
not available. The default value of this property is NO.
*/
@property(nonatomic) BOOL ignoresPreferredAttributeConfigurationErrors;
@ -235,11 +248,16 @@ RTC_OBJC_EXPORT
- (BOOL)setMode:(AVAudioSessionMode)mode error:(NSError **)outError;
- (BOOL)setInputGain:(float)gain error:(NSError **)outError;
- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError;
- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration error:(NSError **)outError;
- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride error:(NSError **)outError;
- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort error:(NSError **)outError;
- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
error:(NSError **)outError;
- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
error:(NSError **)outError;
- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
error:(NSError **)outError;
- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
error:(NSError **)outError;
- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
error:(NSError **)outError;
- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
error:(NSError **)outError;
- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
@ -254,13 +272,15 @@ RTC_OBJC_EXPORT
* returned.
* `lockForConfiguration` must be called first.
*/
- (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
- (BOOL)setConfiguration
: (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
: (NSError **)outError;
/** Convenience method that calls both setConfiguration and setActive.
* `lockForConfiguration` must be called first.
*/
- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
- (BOOL)setConfiguration:
(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
active:(BOOL)active
error:(NSError **)outError;
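
An illustrative delegate implementing two of the callbacks declared above (sketch only):

@interface MyAudioSessionObserver
    : NSObject <RTC_OBJC_TYPE(RTCAudioSessionDelegate)>
@end

@implementation MyAudioSessionObserver

- (void)audioSessionDidBeginInterruption:
    (RTC_OBJC_TYPE(RTCAudioSession) *)session {
  RTCLog(@"Interruption began; pausing call UI.");
}

- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
        didSetActive:(BOOL)active {
  RTCLog(@"Session active state is now %d.", active);
}

@end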

View File

@ -26,20 +26,22 @@
#error ABSL_HAVE_THREAD_LOCAL should be defined for MacOS / iOS Targets.
#endif
NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
NSString *const kRTCAudioSessionErrorDomain =
@"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
NSString *const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
namespace {
// Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is locked,
// we need a separate variable to check that the mutex is locked in the RTCAudioSession.
// Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is
// locked, we need a separate variable to check that the mutex is locked in the
// RTCAudioSession.
ABSL_CONST_INIT thread_local bool mutex_locked = false;
} // namespace
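
A reconstructed sketch (not part of this diff) of the pattern that flag enables: methods that mutate session state first verify the calling thread holds the configuration lock.

// `mutex_locked` is the thread_local above; `checkLock:` and `lockError`
// both appear later in this file.
- (BOOL)checkLock:(NSError **)outError {
  if (!mutex_locked) {
    if (outError) {
      *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
    }
    return NO;
  }
  return YES;
}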
@interface RTC_OBJC_TYPE (RTCAudioSession)
() @property(nonatomic,
readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
() @property(nonatomic, readonly)
std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
@end
// This class needs to be thread-safe because it is accessed from many threads.
@ -75,7 +77,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
return [self initWithAudioSession:[AVAudioSession sharedInstance]];
}
/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
/** This initializer provides a way for unit tests to inject a fake/mock audio
* session. */
- (instancetype)initWithAudioSession:(id)audioSession {
self = [super init];
if (self) {
@ -105,15 +108,17 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
selector:@selector(handleSilenceSecondaryAudioHintNotification:)
name:AVAudioSessionSilenceSecondaryAudioHintNotification
object:nil];
// Also track foreground event in order to deal with interruption ended situation.
// Also track foreground event in order to deal with interruption ended
// situation.
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:nil];
[_session addObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
[_session
addObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
}
@ -122,9 +127,10 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
- (void)dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
[_session removeObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
[_session
removeObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
}
@ -142,11 +148,19 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
" inputLatency: %f\n"
" outputVolume: %f\n"
"}";
NSString *description = [NSString stringWithFormat:format,
self.category, (long)self.categoryOptions, self.mode,
self.isActive, self.sampleRate, self.IOBufferDuration,
self.outputNumberOfChannels, self.inputNumberOfChannels,
self.outputLatency, self.inputLatency, self.outputVolume];
NSString *description =
[NSString stringWithFormat:format,
self.category,
(long)self.categoryOptions,
self.mode,
self.isActive,
self.sampleRate,
self.IOBufferDuration,
self.outputNumberOfChannels,
self.inputNumberOfChannels,
self.outputLatency,
self.inputLatency,
self.outputVolume];
return description;
}
@ -201,7 +215,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
ignoresPreferredAttributeConfigurationErrors) {
return;
}
_ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
_ignoresPreferredAttributeConfigurationErrors =
ignoresPreferredAttributeConfigurationErrors;
}
}
@ -229,10 +244,9 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
return;
}
@synchronized(self) {
_delegates.erase(std::remove(_delegates.begin(),
_delegates.end(),
delegate),
_delegates.end());
_delegates.erase(
std::remove(_delegates.begin(), _delegates.end(), delegate),
_delegates.end());
[self removeZeroedDelegates];
}
}
@ -347,8 +361,7 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
return self.session.preferredIOBufferDuration;
}
- (BOOL)setActive:(BOOL)active
error:(NSError **)outError {
- (BOOL)setActive:(BOOL)active error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
@ -373,9 +386,7 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
// option.
AVAudioSessionSetActiveOptions options =
active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
success = [session setActive:active
withOptions:options
error:&error];
success = [session setActive:active withOptions:options error:&error];
if (outError) {
*outError = error;
}
@ -394,7 +405,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
}
} else {
RTCLogError(@"Failed to setActive:%d. Error: %@",
active, error.localizedDescription);
active,
error.localizedDescription);
[self notifyFailedToSetActive:active error:error];
}
// Set isActive and decrement activation count on deactivation
@ -417,7 +429,10 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
if (![self checkLock:outError]) {
return NO;
}
return [self.session setCategory:category mode:mode options:options error:outError];
return [self.session setCategory:category
mode:mode
options:options
error:outError];
}
- (BOOL)setCategory:(AVAudioSessionCategory)category
@ -508,7 +523,7 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
#pragma mark - Notifications
- (void)handleInterruptionNotification:(NSNotification *)notification {
NSNumber* typeNumber =
NSNumber *typeNumber =
notification.userInfo[AVAudioSessionInterruptionTypeKey];
AVAudioSessionInterruptionType type =
(AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
@ -537,7 +552,7 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
- (void)handleRouteChangeNotification:(NSNotification *)notification {
// Get reason for current route change.
NSNumber* reasonNumber =
NSNumber *reasonNumber =
notification.userInfo[AVAudioSessionRouteChangeReasonKey];
AVAudioSessionRouteChangeReason reason =
(AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
@ -569,11 +584,12 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
RTCLog(@"Audio route changed: RouteConfigurationChange");
break;
}
AVAudioSessionRouteDescription* previousRoute =
AVAudioSessionRouteDescription *previousRoute =
notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
// Log previous route configuration.
RTCLog(@"Previous route: %@\nCurrent route:%@",
previousRoute, self.session.currentRoute);
previousRoute,
self.session.currentRoute);
[self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}
@ -589,14 +605,16 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
[self notifyMediaServicesWereReset];
}
- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
- (void)handleSilenceSecondaryAudioHintNotification:
(NSNotification *)notification {
// TODO(henrika): just adding logs here for now until we know if we will ever
// see this notification and might be affected by it or if further actions
// are required.
NSNumber *typeNumber =
notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
AVAudioSessionSilenceSecondaryAudioHintType type =
(AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
(AVAudioSessionSilenceSecondaryAudioHintType)
typeNumber.unsignedIntegerValue;
switch (type) {
case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
RTCLog(@"Another application's primary audio has started.");
@ -609,7 +627,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
BOOL isInterrupted = self.isInterrupted;
RTCLog(@"Application became active after an interruption. Treating as interruption "
RTCLog(@"Application became active after an interruption. Treating as "
@"interruption "
"end. isInterrupted changed from %d to 0.",
isInterrupted);
if (isInterrupted) {
@ -623,11 +642,14 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
#pragma mark - Private
+ (NSError *)lockError {
NSDictionary *userInfo =
@{NSLocalizedDescriptionKey : @"Must call lockForConfiguration before calling this method."};
NSError *error = [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
code:kRTCAudioSessionErrorLockRequired
userInfo:userInfo];
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey :
@"Must call lockForConfiguration before calling this method."
};
NSError *error =
[[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
code:kRTCAudioSessionErrorLockRequired
userInfo:userInfo];
return error;
}
@ -687,8 +709,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
@synchronized(self) {
if (_isInterrupted == isInterrupted) {
return;
}
_isInterrupted = isInterrupted;
}
_isInterrupted = isInterrupted;
}
}
@ -766,9 +788,9 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
if (sessionSampleRate != preferredSampleRate) {
RTCLogWarning(
@"Current sample rate (%.2f) is not the preferred rate (%.2f)",
sessionSampleRate, preferredSampleRate);
if (![self setPreferredSampleRate:sessionSampleRate
error:&error]) {
sessionSampleRate,
preferredSampleRate);
if (![self setPreferredSampleRate:sessionSampleRate error:&error]) {
RTCLogError(@"Failed to set preferred sample rate: %@",
error.localizedDescription);
if (outError) {
@ -791,8 +813,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
}
- (NSError *)configurationErrorWithDescription:(NSString *)description {
NSDictionary* userInfo = @{
NSLocalizedDescriptionKey: description,
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey : description,
};
return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
code:kRTCAudioSessionErrorConfiguration
@ -802,7 +824,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
- (void)updateAudioSessionAfterEvent {
BOOL shouldActivate = self.activationCount > 0;
AVAudioSessionSetActiveOptions options = shouldActivate ?
0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
0 :
AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
NSError *error = nil;
if ([self.session setActive:shouldActivate
withOptions:options
@ -810,7 +833,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
self.isActive = shouldActivate;
} else {
RTCLogError(@"Failed to set session active to %d. Error:%@",
shouldActivate, error.localizedDescription);
shouldActivate,
error.localizedDescription);
}
}
@ -851,7 +875,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
if (_session != session) {
RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
RTCLogError(
@"audioSessionDidDeactivate called on different AVAudioSession");
}
RTCLog(@"Audio session was externally deactivated.");
self.isActive = NO;
@ -881,9 +906,10 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
SEL sel = @selector(audioSession:audioUnitStartFailedWithError:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self
audioUnitStartFailedWithError:[NSError errorWithDomain:kRTCAudioSessionErrorDomain
code:error
userInfo:nil]];
audioUnitStartFailedWithError:
[NSError errorWithDomain:kRTCAudioSessionErrorDomain
code:error
userInfo:nil]];
}
}
}
@ -909,7 +935,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;
}
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
previousRoute:
(AVAudioSessionRouteDescription *)previousRoute {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
if ([delegate respondsToSelector:sel]) {

View File

@ -39,7 +39,8 @@ RTC_OBJC_EXPORT
/** Returns the configuration that WebRTC needs. */
+ (instancetype)webRTCConfiguration;
/** Provide a way to override the default configuration. */
+ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
+ (void)setWebRTCConfiguration:
(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
@end
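
For example (sketch), an app can override the defaults before WebRTC configures the session; the `mode` property is referenced elsewhere in this commit, and the chosen value here is purely illustrative:

RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
    [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
config.mode = AVAudioSessionModeVideoChat;  // illustrative tweak
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:config];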

View File

@ -19,14 +19,16 @@ class AudioSessionObserver;
/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
* methods on the AudioSessionObserver.
*/
@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@interface RTCNativeAudioSessionDelegateAdapter
: NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
- (instancetype)init NS_UNAVAILABLE;
/** `observer` is a raw pointer and should be kept alive
* for this object's lifetime.
*/
- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer
NS_DESIGNATED_INITIALIZER;
@end
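
Illustrative wiring (sketch; `addDelegate:` and `sharedInstance` are assumed from RTCAudioSession's API rather than shown in this diff):

// `observer` is a raw webrtc::AudioSessionObserver* and must outlive
// the adapter, per the comment above.
RTCNativeAudioSessionDelegateAdapter *adapter =
    [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:observer];
[[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance] addDelegate:adapter];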

View File

@ -29,7 +29,8 @@
#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidBeginInterruption:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
_observer->OnInterruptionBegin();
}
@ -40,7 +41,8 @@
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
previousRoute:
(AVAudioSessionRouteDescription *)previousRoute {
switch (reason) {
case AVAudioSessionRouteChangeReasonUnknown:
case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
@ -65,7 +67,8 @@
}
}
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionMediaServerTerminated:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
@ -76,10 +79,12 @@
_observer->OnCanPlayOrRecordChange(canPlayOrRecord);
}
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidStartPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidStopPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession

View File

@ -28,21 +28,25 @@ RTC_OBJC_EXPORT
// Returns list of available capture devices that support video capture.
+ (NSArray<AVCaptureDevice *> *)captureDevices;
// Returns list of formats that are supported by this class for this device.
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:
(AVCaptureDevice *)device;
// Returns the most efficient supported output pixel format for this capturer.
- (FourCharCode)preferredOutputPixelFormat;
// Starts the capture session asynchronously and notifies callback on completion.
// The device will capture video in the format given in the `format` parameter. If the pixel format
// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
// Starts the capture session asynchronously and notifies callback on
// completion. The device will capture video in the format given in the `format`
// parameter. If the pixel format in `format` is supported by the WebRTC
// pipeline, the same pixel format will be used for the output. Otherwise, the
// format returned by `preferredOutputPixelFormat` will be used.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
completionHandler:(nullable void (^)(NSError *_Nullable))completionHandler;
completionHandler:
(nullable void (^)(NSError *_Nullable))completionHandler;
// Stops the capture session asynchronously and notifies callback on completion.
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
- (void)stopCaptureWithCompletionHandler:
(nullable void (^)(void))completionHandler;
// Starts the capture session asynchronously.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
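
Putting the capturer API together, a typical start sequence might look like this sketch. The class is assumed to be RTC_OBJC_TYPE(RTCCameraVideoCapturer) and `capturer` an already-created instance; real code would pick a format matching its target resolution rather than the first one.

NSArray<AVCaptureDevice *> *devices =
    [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
AVCaptureDevice *device = devices.firstObject;
AVCaptureDeviceFormat *format = [RTC_OBJC_TYPE(RTCCameraVideoCapturer)
    supportedFormatsForDevice:device].firstObject;
[capturer startCaptureWithDevice:device
                          format:format
                             fps:30
               completionHandler:^(NSError *_Nullable error) {
                 if (error) {
                   RTCLogError(@"Start capture failed: %@", error);
                 }
               }];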

View File

@ -19,7 +19,8 @@
namespace {
rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType) {
rtc::AdapterType AdapterTypeFromInterfaceType(
nw_interface_type_t interfaceType) {
rtc::AdapterType adapterType = rtc::ADAPTER_TYPE_UNKNOWN;
switch (interfaceType) {
case nw_interface_type_other:
@ -82,16 +83,22 @@ rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType)
RTCLog(@"NW path monitor status: satisfiable.");
}
std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> *map =
new std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>();
new std::
map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>();
nw_path_enumerate_interfaces(
path, (nw_path_enumerate_interfaces_block_t) ^ (nw_interface_t interface) {
const char *name = nw_interface_get_name(interface);
nw_interface_type_t interfaceType = nw_interface_get_type(interface);
RTCLog(@"NW path monitor available interface: %s", name);
rtc::AdapterType adapterType = AdapterTypeFromInterfaceType(interfaceType);
map->insert(std::pair<std::string, rtc::AdapterType>(name, adapterType));
return true;
});
path,
(nw_path_enumerate_interfaces_block_t) ^
(nw_interface_t interface) {
const char *name = nw_interface_get_name(interface);
nw_interface_type_t interfaceType =
nw_interface_get_type(interface);
RTCLog(@"NW path monitor available interface: %s", name);
rtc::AdapterType adapterType =
AdapterTypeFromInterfaceType(interfaceType);
map->insert(std::pair<std::string, rtc::AdapterType>(
name, adapterType));
return true;
});
@synchronized(strongSelf) {
webrtc::NetworkMonitorObserver *observer = strongSelf->_observer;
if (observer) {
@ -102,7 +109,8 @@ rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType)
});
nw_path_monitor_set_queue(
_pathMonitor,
[RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]);
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]);
nw_path_monitor_start(_pathMonitor);
}
}

View File

@ -29,12 +29,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
} Vertex;
typedef struct {
float4 position[[position]];
float4 position [[position]];
float2 texcoord;
} Varyings;
vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
unsigned int vid[[vertex_id]]) {
vertex Varyings vertexPassthrough(constant Vertex * verticies [[buffer(0)]],
unsigned int vid [[vertex_id]]) {
Varyings out;
constant Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
@ -44,10 +44,10 @@ static NSString *const shaderSource = MTL_STRINGIFY(
}
fragment half4 fragmentColorConversion(
Varyings in[[stage_in]],
texture2d<float, access::sample> textureY[[texture(0)]],
texture2d<float, access::sample> textureU[[texture(1)]],
texture2d<float, access::sample> textureV[[texture(2)]]) {
Varyings in [[stage_in]],
texture2d<float, access::sample> textureY [[texture(0)]],
texture2d<float, access::sample> textureU [[texture(1)]],
texture2d<float, access::sample> textureV [[texture(2)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
float y;
float u;
@ -129,10 +129,11 @@ static NSString *const shaderSource = MTL_STRINGIFY(
if (!_descriptor || _width != frame.width || _height != frame.height) {
_width = frame.width;
_height = frame.height;
_descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_width
height:_height
mipmapped:NO];
_descriptor = [MTLTextureDescriptor
texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_width
height:_height
mipmapped:NO];
_descriptor.usage = MTLTextureUsageShaderRead;
_yTexture = [device newTextureWithDescriptor:_descriptor];
}
@ -143,14 +144,15 @@ static NSString *const shaderSource = MTL_STRINGIFY(
withBytes:buffer.dataY
bytesPerRow:buffer.strideY];
if (!_chromaDescriptor || _chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2) {
if (!_chromaDescriptor || _chromaWidth != frame.width / 2 ||
_chromaHeight != frame.height / 2) {
_chromaWidth = frame.width / 2;
_chromaHeight = frame.height / 2;
_chromaDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_chromaWidth
height:_chromaHeight
mipmapped:NO];
_chromaDescriptor = [MTLTextureDescriptor
texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_chromaWidth
height:_chromaHeight
mipmapped:NO];
_chromaDescriptor.usage = MTLTextureUsageShaderRead;
_uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
_vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
@ -168,7 +170,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- (void)uploadTexturesToRenderEncoder:
(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
[renderEncoder setFragmentTexture:_uTexture atIndex:1];
[renderEncoder setFragmentTexture:_vTexture atIndex:2];

View File

@ -30,12 +30,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
} Vertex;
typedef struct {
float4 position[[position]];
float4 position [[position]];
float2 texcoord;
} Varyings;
vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
unsigned int vid[[vertex_id]]) {
vertex Varyings vertexPassthrough(constant Vertex * verticies [[buffer(0)]],
unsigned int vid [[vertex_id]]) {
Varyings out;
constant Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
@ -45,9 +45,9 @@ static NSString *const shaderSource = MTL_STRINGIFY(
// Receiving YCrCb textures.
fragment half4 fragmentColorConversion(
Varyings in[[stage_in]],
texture2d<float, access::sample> textureY[[texture(0)]],
texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
Varyings in [[stage_in]],
texture2d<float, access::sample> textureY [[texture(0)]],
texture2d<float, access::sample> textureCbCr [[texture(1)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
float y;
float2 uv;
@ -55,7 +55,10 @@ static NSString *const shaderSource = MTL_STRINGIFY(
uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
// Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
float4 out = float4(y + 1.403 * uv.y,
y - 0.344 * uv.x - 0.714 * uv.y,
y + 1.770 * uv.x,
1.0);
return half4(out);
});
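
Spelled out, the conversion in the fragment shader above computes, with u = Cb - 0.5 and v = Cr - 0.5 (the re-centering done on the preceding line):

R = y + 1.403 * v
G = y - 0.344 * u - 0.714 * v
B = y + 1.770 * u

These are the coefficients cited from fourcc.org in the comment above.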
@ -75,10 +78,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
}
- (BOOL)initializeTextureCache {
CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
nil, &_textureCache);
CVReturn status = CVMetalTextureCacheCreate(
kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache);
if (status != kCVReturnSuccess) {
RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
RTCLogError(
@"Metal: Failed to initialize metal texture cache. Return status is %d",
status);
return NO;
}
@ -96,7 +101,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
@ -106,11 +112,13 @@ static NSString *const shaderSource = MTL_STRINGIFY(
}
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
RTC_DCHECK(
[frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
CVPixelBufferRef pixelBuffer =
((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
id<MTLTexture> lumaTexture = nil;
id<MTLTexture> chromaTexture = nil;
@ -121,9 +129,16 @@ static NSString *const shaderSource = MTL_STRINGIFY(
int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
int indexPlane = 0;
CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
lumaHeight, indexPlane, &outTexture);
CVReturn result =
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
nil,
MTLPixelFormatR8Unorm,
lumaWidth,
lumaHeight,
indexPlane,
&outTexture);
if (result == kCVReturnSuccess) {
lumaTexture = CVMetalTextureGetTexture(outTexture);
@ -135,9 +150,15 @@ static NSString *const shaderSource = MTL_STRINGIFY(
// Chroma (CrCb) texture.
indexPlane = 1;
result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
lumaHeight / 2, indexPlane, &outTexture);
result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
nil,
MTLPixelFormatRG8Unorm,
lumaWidth / 2,
lumaHeight / 2,
indexPlane,
&outTexture);
if (result == kCVReturnSuccess) {
chromaTexture = CVMetalTextureGetTexture(outTexture);
}
@ -151,7 +172,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
return NO;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- (void)uploadTexturesToRenderEncoder:
(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
[renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
}

View File

@ -30,12 +30,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
} Vertex;
typedef struct {
float4 position[[position]];
float4 position [[position]];
float2 texcoord;
} VertexIO;
vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
uint vid[[vertex_id]]) {
vertex VertexIO vertexPassthrough(constant Vertex * verticies [[buffer(0)]],
uint vid [[vertex_id]]) {
VertexIO out;
constant Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
@ -43,9 +43,10 @@ static NSString *const shaderSource = MTL_STRINGIFY(
return out;
}
fragment half4 fragmentColorConversion(VertexIO in[[stage_in]],
texture2d<half, access::sample> texture[[texture(0)]],
constant bool &isARGB[[buffer(0)]]) {
fragment half4 fragmentColorConversion(
VertexIO in [[stage_in]],
texture2d<half, access::sample> texture [[texture(0)]],
constant bool &isARGB [[buffer(0)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
half4 out = texture.sample(s, in.texcoord);
@ -73,10 +74,12 @@ static NSString *const shaderSource = MTL_STRINGIFY(
}
- (BOOL)initializeTextureCache {
CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
nil, &_textureCache);
CVReturn status = CVMetalTextureCacheCreate(
kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache);
if (status != kCVReturnSuccess) {
RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
RTCLogError(
@"Metal: Failed to initialize metal texture cache. Return status is %d",
status);
return NO;
}
@ -94,7 +97,8 @@ static NSString *const shaderSource = MTL_STRINGIFY(
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
@ -104,11 +108,13 @@ static NSString *const shaderSource = MTL_STRINGIFY(
}
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
RTC_DCHECK(
[frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
CVPixelBufferRef pixelBuffer =
((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
id<MTLTexture> gpuTexture = nil;
CVMetalTextureRef textureOut = nullptr;
@ -130,9 +136,16 @@ static NSString *const shaderSource = MTL_STRINGIFY(
return NO;
}
CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat,
width, height, 0, &textureOut);
CVReturn result =
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
nil,
mtlPixelFormat,
width,
height,
0,
&textureOut);
if (result == kCVReturnSuccess) {
gpuTexture = CVMetalTextureGetTexture(textureOut);
}
@ -140,17 +153,18 @@ static NSString *const shaderSource = MTL_STRINGIFY(
if (gpuTexture != nil) {
_texture = gpuTexture;
_uniformsBuffer =
[[self currentMetalDevice] newBufferWithBytes:&isARGB
length:sizeof(isARGB)
options:MTLResourceCPUCacheModeDefaultCache];
_uniformsBuffer = [[self currentMetalDevice]
newBufferWithBytes:&isARGB
length:sizeof(isARGB)
options:MTLResourceCPUCacheModeDefaultCache];
return YES;
}
return NO;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- (void)uploadTexturesToRenderEncoder:
(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_texture atIndex:0];
[renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
}

View File

@ -20,7 +20,8 @@ NS_ASSUME_NONNULL_BEGIN
- (nullable id<MTLDevice>)currentMetalDevice;
- (NSString *)shaderSource;
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
- (void)uploadTexturesToRenderEncoder:
(id<MTLRenderCommandEncoder>)renderEncoder;
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
cropWidth:(nonnull int *)cropWidth


@ -33,8 +33,8 @@ NS_ASSUME_NONNULL_BEGIN
/**
* Sets the provided view as rendering destination if possible.
*
* If not possible, the method returns NO and callers of the method are responsible for performing
* cleanups.
* If not possible, the method returns NO and callers of the method are
* responsible for performing cleanups.
*/
#if TARGET_OS_IOS
@ -52,7 +52,8 @@ NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
/** @abstract A wrapped RTCVideoRotation, or nil.
@discussion When not nil, the rotation of the actual frame is ignored when rendering.
@discussion When not nil, the rotation of the actual frame is ignored when
rendering.
*/
@property(atomic, nullable) NSValue *rotationOverride;


@ -39,44 +39,93 @@ static inline void getCubeVertexData(int cropX,
RTCVideoRotation rotation,
float *buffer) {
// The computed values are the adjusted texture coordinates, in [0..1].
// For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
// left/top edge.
// For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
// right/bottom edge (i.e. render up to 80% of the width/height).
// For the left and top, 0.0 means no cropping and e.g. 0.2 means we're
// skipping 20% of the left/top edge. For the right and bottom, 1.0 means no
// cropping and e.g. 0.8 means we're skipping 20% of the right/bottom edge
// (i.e. render up to 80% of the width/height).
float cropLeft = cropX / (float)frameWidth;
float cropRight = (cropX + cropWidth) / (float)frameWidth;
float cropTop = cropY / (float)frameHeight;
float cropBottom = (cropY + cropHeight) / (float)frameHeight;
// These arrays map the view coordinates to texture coordinates, taking cropping and rotation
// into account. The first two columns are view coordinates, the last two are texture coordinates.
// These arrays map the view coordinates to texture coordinates, taking
// cropping and rotation into account. The first two columns are view
// coordinates, the last two are texture coordinates.
switch (rotation) {
case RTCVideoRotation_0: {
float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
1.0, -1.0, cropRight, cropBottom,
-1.0, 1.0, cropLeft, cropTop,
1.0, 1.0, cropRight, cropTop};
float values[16] = {-1.0,
-1.0,
cropLeft,
cropBottom,
1.0,
-1.0,
cropRight,
cropBottom,
-1.0,
1.0,
cropLeft,
cropTop,
1.0,
1.0,
cropRight,
cropTop};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_90: {
float values[16] = {-1.0, -1.0, cropRight, cropBottom,
1.0, -1.0, cropRight, cropTop,
-1.0, 1.0, cropLeft, cropBottom,
1.0, 1.0, cropLeft, cropTop};
float values[16] = {-1.0,
-1.0,
cropRight,
cropBottom,
1.0,
-1.0,
cropRight,
cropTop,
-1.0,
1.0,
cropLeft,
cropBottom,
1.0,
1.0,
cropLeft,
cropTop};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_180: {
float values[16] = {-1.0, -1.0, cropRight, cropTop,
1.0, -1.0, cropLeft, cropTop,
-1.0, 1.0, cropRight, cropBottom,
1.0, 1.0, cropLeft, cropBottom};
float values[16] = {-1.0,
-1.0,
cropRight,
cropTop,
1.0,
-1.0,
cropLeft,
cropTop,
-1.0,
1.0,
cropRight,
cropBottom,
1.0,
1.0,
cropLeft,
cropBottom};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_270: {
float values[16] = {-1.0, -1.0, cropLeft, cropTop,
1.0, -1.0, cropLeft, cropBottom,
-1.0, 1.0, cropRight, cropTop,
1.0, 1.0, cropRight, cropBottom};
float values[16] = {-1.0,
-1.0,
cropLeft,
cropTop,
1.0,
-1.0,
cropLeft,
cropBottom,
-1.0,
1.0,
cropRight,
cropTop,
1.0,
1.0,
cropRight,
cropBottom};
memcpy(buffer, &values, sizeof(values));
} break;
}
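The comment above defines how the pixel crop rectangle maps into [0..1] texture space. A minimal standalone sketch of that arithmetic (the struct and function names are illustrative, not WebRTC API):

// Sketch of the normalized-crop arithmetic from getCubeVertexData above.
#include <cstdio>

struct CropCoords {
  float left, right, top, bottom;  // All in [0..1] texture space.
};

static CropCoords NormalizedCrop(int cropX, int cropY, int cropWidth,
                                 int cropHeight, int frameWidth,
                                 int frameHeight) {
  return {cropX / (float)frameWidth,
          (cropX + cropWidth) / (float)frameWidth,
          cropY / (float)frameHeight,
          (cropY + cropHeight) / (float)frameHeight};
}

int main() {
  // Crop a centered 1280x720 region out of a 1920x1080 frame.
  CropCoords c = NormalizedCrop(320, 180, 1280, 720, 1920, 1080);
  std::printf("left=%.3f right=%.3f top=%.3f bottom=%.3f\n",
              c.left, c.right, c.top, c.bottom);
  // -> left=0.167 right=0.833 top=0.167 bottom=0.833
  return 0;
}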
@ -102,7 +151,8 @@ static const NSInteger kMaxInflightBuffers = 1;
// Buffers.
id<MTLBuffer> _vertexBuffer;
// Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
// Values affecting the vertex buffer. Stored for comparison to avoid
// unnecessary recreation.
int _oldFrameWidth;
int _oldFrameHeight;
int _oldCropWidth;
@ -140,9 +190,10 @@ static const NSInteger kMaxInflightBuffers = 1;
[self loadAssets];
float vertexBufferArray[16] = {0};
_vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
length:sizeof(vertexBufferArray)
options:MTLResourceCPUCacheModeWriteCombined];
_vertexBuffer =
[_device newBufferWithBytes:vertexBufferArray
length:sizeof(vertexBufferArray)
options:MTLResourceCPUCacheModeWriteCombined];
success = YES;
}
return success;
@ -158,7 +209,8 @@ static const NSInteger kMaxInflightBuffers = 1;
return nil;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
- (void)uploadTexturesToRenderEncoder:
(id<MTLRenderCommandEncoder>)renderEncoder {
RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
}
@ -201,8 +253,8 @@ static const NSInteger kMaxInflightBuffers = 1;
// Recompute the texture cropping and recreate vertexBuffer if necessary.
if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
frameHeight != _oldFrameHeight) {
cropHeight != _oldCropHeight || rotation != _oldRotation ||
frameWidth != _oldFrameWidth || frameHeight != _oldFrameHeight) {
getCubeVertexData(cropX,
cropY,
cropWidth,
@ -239,8 +291,9 @@ static const NSInteger kMaxInflightBuffers = 1;
NSError *libraryError = nil;
NSString *shaderSource = [self shaderSource];
id<MTLLibrary> sourceLibrary =
[_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
id<MTLLibrary> sourceLibrary = [_device newLibraryWithSource:shaderSource
options:NULL
error:&libraryError];
if (libraryError) {
RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
@ -257,17 +310,22 @@ static const NSInteger kMaxInflightBuffers = 1;
}
- (void)loadAssets {
id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
id<MTLFunction> vertexFunction =
[_defaultLibrary newFunctionWithName:vertexFunctionName];
id<MTLFunction> fragmentFunction =
[_defaultLibrary newFunctionWithName:fragmentFunctionName];
MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
MTLRenderPipelineDescriptor *pipelineDescriptor =
[[MTLRenderPipelineDescriptor alloc] init];
pipelineDescriptor.label = pipelineDescriptorLabel;
pipelineDescriptor.vertexFunction = vertexFunction;
pipelineDescriptor.fragmentFunction = fragmentFunction;
pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
NSError *error = nil;
_pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
_pipelineState =
[_device newRenderPipelineStateWithDescriptor:pipelineDescriptor
error:&error];
if (!_pipelineState) {
RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
@ -284,7 +342,8 @@ static const NSInteger kMaxInflightBuffers = 1;
dispatch_semaphore_signal(block_semaphore);
}];
MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
MTLRenderPassDescriptor *renderPassDescriptor =
_view.currentRenderPassDescriptor;
if (renderPassDescriptor) { // Valid drawable.
id<MTLRenderCommandEncoder> renderEncoder =
[commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];


@ -13,8 +13,9 @@
NS_ASSUME_NONNULL_BEGIN
/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
* and RTCEAGLVideoView if no external shader is specified. This shader will render
* the video in a rectangle without any color or geometric transformations.
* and RTCEAGLVideoView if no external shader is specified. This shader will
* render the video in a rectangle without any color or geometric
* transformations.
*/
@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>


@ -25,50 +25,44 @@ static const int kUvTextureUnit = 1;
// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kI420FragmentShaderSource[] =
SHADER_VERSION
"precision highp float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureU;\n"
"uniform lowp sampler2D s_textureV;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" float y, u, v, r, g, b;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
" u = u - 0.5;\n"
" v = v - 0.5;\n"
" r = y + 1.403 * v;\n"
" g = y - 0.344 * u - 0.714 * v;\n"
" b = y + 1.770 * u;\n"
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
" }\n";
static const char kI420FragmentShaderSource[] = SHADER_VERSION
"precision highp float;" FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureU;\n"
"uniform lowp sampler2D s_textureV;\n" FRAGMENT_SHADER_OUT "void main() {\n"
" float y, u, v, r, g, b;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
" v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
" u = u - 0.5;\n"
" v = v - 0.5;\n"
" r = y + 1.403 * v;\n"
" g = y - 0.344 * u - 0.714 * v;\n"
" b = y + 1.770 * u;\n"
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
" }\n";
static const char kNV12FragmentShaderSource[] =
SHADER_VERSION
"precision mediump float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureUV;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" mediump float y;\n"
" mediump vec2 uv;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
" vec2(0.5, 0.5);\n"
" " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
" y - 0.344 * uv.x - 0.714 * uv.y,\n"
" y + 1.770 * uv.x,\n"
" 1.0);\n"
" }\n";
static const char kNV12FragmentShaderSource[] = SHADER_VERSION
"precision mediump float;" FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
"uniform lowp sampler2D s_textureY;\n"
"uniform lowp sampler2D s_textureUV;\n" FRAGMENT_SHADER_OUT
"void main() {\n"
" mediump float y;\n"
" mediump vec2 uv;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
" uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
" vec2(0.5, 0.5);\n"
" " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
" y - 0.344 * uv.x - 0.714 * uv.y,\n"
" y + 1.770 * uv.x,\n"
" 1.0);\n"
" }\n";
@implementation RTCDefaultShader {
GLuint _vertexBuffer;
GLuint _vertexArray;
// Store current rotation and only upload new vertex data when rotation changes.
// Store current rotation and only upload new vertex data when rotation
// changes.
std::optional<RTCVideoRotation> _currentRotation;
GLuint _i420Program;


@ -18,7 +18,8 @@
@property(nonatomic, readonly) GLuint vTexture;
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithContext:(GlContextType *)context
NS_DESIGNATED_INITIALIZER;
- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;


@ -17,9 +17,9 @@
#include <vector>
// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets
// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already
// in use.
// Two sets of 3 textures are used here, one for each of the Y, U and V planes.
// Having two sets alleviates CPU blockage in the event that the GPU is asked to
// render to a texture that is already in use.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTexturesPerSet = 3;
static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
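A sketch of the round-robin set selection implied by the two-texture-set comment above (a hypothetical loop, not the actual uploader code):

// Each new frame uploads into the set the GPU is not currently reading.
#include <cstdio>

static const int kSets = 2;
static const int kPerSet = 3;  // Y, U and V planes.

int main() {
  int current_set = 0;
  for (int frame = 0; frame < 4; ++frame) {
    int first = current_set * kPerSet;
    std::printf("frame %d -> textures [%d..%d]\n", frame, first,
                first + kPerSet - 1);
    current_set = (current_set + 1) % kSets;
  }
  return 0;
}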
@ -29,7 +29,8 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
GLint _currentTextureSet;
// Handles for OpenGL constructs.
GLuint _textures[kNumTextures];
// Used to create a non-padded plane for GPU upload when we receive padded frames.
// Used to create a non-padded plane for GPU upload when we receive padded
// frames.
std::vector<uint8_t> _planeBuffer;
}
@ -81,7 +82,7 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
const uint8_t *uploadPlane = plane;
if ((size_t)stride != width) {
if (_hasUnpackRowLength) {
// GLES3 allows us to specify stride.
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexImage2D(GL_TEXTURE_2D,


@ -22,7 +22,8 @@ NS_ASSUME_NONNULL_BEGIN
@property(nonatomic, readonly) GLuint uvTexture;
- (instancetype)init NS_UNAVAILABLE;
- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithContext:(EAGLContext *)context
NS_DESIGNATED_INITIALIZER;
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;


@ -22,15 +22,13 @@
#include "rtc_base/logging.h"
// Vertex shader doesn't do anything except pass coordinates through.
const char kRTCVertexShaderSource[] =
SHADER_VERSION
VERTEX_SHADER_IN " vec2 position;\n"
VERTEX_SHADER_IN " vec2 texcoord;\n"
VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
"void main() {\n"
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
" v_texcoord = texcoord;\n"
"}\n";
const char kRTCVertexShaderSource[] = SHADER_VERSION VERTEX_SHADER_IN
" vec2 position;\n" VERTEX_SHADER_IN " vec2 texcoord;\n" VERTEX_SHADER_OUT
" vec2 v_texcoord;\n"
"void main() {\n"
" gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
" v_texcoord = texcoord;\n"
"}\n";
// Compiles a shader of the given `type` with GLSL source `source` and returns
// the shader handle or 0 on error.
@ -84,7 +82,8 @@ GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
// Creates and links a shader program with the given fragment shader source and
// a plain vertex shader. Returns the program handle or 0 on error.
GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
GLuint vertexShader =
RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
RTC_CHECK(vertexShader) << "failed to create vertex shader";
GLuint fragmentShader =
RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
@ -106,15 +105,22 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
return 0;
}
// Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The
// last argument indicates offset of data within the vertex buffer.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
// Read position attribute with size of 2 and stride of 4 beginning at the
// start of the array. The last argument indicates offset of data within the
// vertex buffer.
glVertexAttribPointer(
position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
glEnableVertexAttribArray(position);
// Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the
// array. The last argument indicates offset of data within the vertex buffer.
glVertexAttribPointer(
texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
// Read texcoord attribute with size of 2 and stride of 4 beginning at the
// first texcoord in the array. The last argument indicates offset of data
// within the vertex buffer.
glVertexAttribPointer(texcoord,
2,
GL_FLOAT,
GL_FALSE,
4 * sizeof(GLfloat),
(void *)(2 * sizeof(GLfloat)));
glEnableVertexAttribArray(texcoord);
return program;
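The two glVertexAttribPointer calls above assume an interleaved buffer of four floats per vertex. The same layout written as a struct, for clarity (illustrative only; the real code uses a raw GLfloat array):

// Stride is 4 floats; `position` reads offset 0, `texcoord` offset 2 floats.
struct Vertex {
  float x, y;  // Attribute `position`.
  float u, v;  // Attribute `texcoord`, at (void *)(2 * sizeof(GLfloat)).
};
static_assert(sizeof(Vertex) == 4 * sizeof(float),
              "stride passed to glVertexAttribPointer is 4 floats");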
@ -163,15 +169,27 @@ void RTCSetVertexData(RTCVideoRotation rotation) {
rotation_offset = 3;
break;
}
std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
UVCoords.end());
std::rotate(
UVCoords.begin(), UVCoords.begin() + rotation_offset, UVCoords.end());
const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, UVCoords[0][0], UVCoords[0][1],
1, -1, UVCoords[1][0], UVCoords[1][1],
1, 1, UVCoords[2][0], UVCoords[2][1],
-1, 1, UVCoords[3][0], UVCoords[3][1],
-1,
-1,
UVCoords[0][0],
UVCoords[0][1],
1,
-1,
UVCoords[1][0],
UVCoords[1][1],
1,
1,
UVCoords[2][0],
UVCoords[2][1],
-1,
1,
UVCoords[3][0],
UVCoords[3][1],
};
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);


@ -15,17 +15,17 @@
NS_ASSUME_NONNULL_BEGIN
/**
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders
* used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
* RTCVideoViewShading provides a way for apps to customize the OpenGL(ES)
* shaders used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
*/
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCVideoViewShading)<NSObject>
/** Callback for I420 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation
: (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane
: (GLuint)vPlane;
- (void)applyShadingForFrameWithWidth : (int)width height
: (int)height rotation : (RTCVideoRotation)rotation yPlane
: (GLuint)yPlane uPlane : (GLuint)uPlane vPlane : (GLuint)vPlane;
/** Callback for NV12 frames. Each plane is given as a texture. */
- (void)applyShadingForFrameWithWidth:(int)width


@ -15,8 +15,9 @@
/** Class for H264 specific config. */
typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) {
RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed
RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed
RTCH264PacketizationModeNonInterleaved =
0, // Mode 1 - STAP-A, FU-A is allowed
RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed
};
RTC_OBJC_EXPORT


@ -15,9 +15,9 @@
NS_ASSUME_NONNULL_BEGIN
/** This decoder factory includes support for all codecs bundled with WebRTC. If using custom
 * codecs, create custom implementations of RTCVideoEncoderFactory and
 * RTCVideoDecoderFactory.
/** This decoder factory includes support for all codecs bundled with WebRTC.
 * If using custom codecs, create custom implementations of RTCVideoEncoderFactory
 * and RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>

View File

@ -15,9 +15,9 @@
NS_ASSUME_NONNULL_BEGIN
/** This encoder factory includes support for all codecs bundled with WebRTC. If using custom
 * codecs, create custom implementations of RTCVideoEncoderFactory and
 * RTCVideoDecoderFactory.
/** This encoder factory includes support for all codecs bundled with WebRTC.
 * If using custom codecs, create custom implementations of RTCVideoEncoderFactory
 * and RTCVideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>


@ -55,6 +55,7 @@ RTC_OBJC_EXPORT
@property(nonatomic, readonly) NSString *hexString;
- (instancetype)initWithHexString:(NSString *)hexString;
- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level;
- (instancetype)initWithProfile:(RTCH264Profile)profile
level:(RTCH264Level)level;
@end


@ -42,8 +42,8 @@ NSString *MaxSupportedLevelForProfile(webrtc::H264Profile profile) {
const std::optional<webrtc::H264ProfileLevelId> profileLevelId =
[UIDevice maxSupportedH264Profile];
if (profileLevelId && profileLevelId->profile >= profile) {
const std::optional<std::string> profileString =
H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(profile, profileLevelId->level));
const std::optional<std::string> profileString = H264ProfileLevelIdToString(
webrtc::H264ProfileLevelId(profile, profileLevelId->level));
if (profileString) {
return [NSString stringForStdString:*profileString];
}
@ -54,7 +54,8 @@ NSString *MaxSupportedLevelForProfile(webrtc::H264Profile profile) {
NSString *MaxSupportedProfileLevelConstrainedBaseline() {
#if defined(WEBRTC_IOS)
NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedBaseline);
NSString *profile = MaxSupportedLevelForProfile(
webrtc::H264Profile::kProfileConstrainedBaseline);
if (profile != nil) {
return profile;
}
@ -64,7 +65,8 @@ NSString *MaxSupportedProfileLevelConstrainedBaseline() {
NSString *MaxSupportedProfileLevelConstrainedHigh() {
#if defined(WEBRTC_IOS)
NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedHigh);
NSString *profile =
MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedHigh);
if (profile != nil) {
return profile;
}
@ -95,7 +97,8 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
self.hexString = hexString;
std::optional<webrtc::H264ProfileLevelId> profile_level_id =
webrtc::ParseH264ProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
webrtc::ParseH264ProfileLevelId(
[hexString cStringUsingEncoding:NSUTF8StringEncoding]);
if (profile_level_id.has_value()) {
self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
self.level = static_cast<RTCH264Level>(profile_level_id->level);
@ -104,17 +107,18 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
return self;
}
- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
- (instancetype)initWithProfile:(RTCH264Profile)profile
level:(RTCH264Level)level {
self = [super init];
if (self) {
self.profile = profile;
self.level = level;
std::optional<std::string> hex_string =
webrtc::H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(
static_cast<webrtc::H264Profile>(profile), static_cast<webrtc::H264Level>(level)));
self.hexString =
[NSString stringWithCString:hex_string.value_or("").c_str() encoding:NSUTF8StringEncoding];
std::optional<std::string> hex_string = webrtc::H264ProfileLevelIdToString(
webrtc::H264ProfileLevelId(static_cast<webrtc::H264Profile>(profile),
static_cast<webrtc::H264Level>(level)));
self.hexString = [NSString stringWithCString:hex_string.value_or("").c_str()
encoding:NSUTF8StringEncoding];
}
return self;
}
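The hexString round-tripped here is the SDP profile-level-id from RFC 6184: three bytes (profile_idc, profile-iop constraint flags, level_idc) printed as six hex digits. A hedged decode sketch; the real parsing is delegated to webrtc::ParseH264ProfileLevelId:

#include <cstdint>
#include <cstdio>
#include <string>

int main() {
  const std::string hex = "42e01f";  // Constrained Baseline, level 3.1.
  uint32_t bits = std::stoul(hex, nullptr, 16);
  unsigned profile_idc = (bits >> 16) & 0xFF;  // 0x42 = 66: Baseline.
  unsigned profile_iop = (bits >> 8) & 0xFF;   // 0xE0: constraint flags set.
  unsigned level_idc = bits & 0xFF;            // 0x1F = 31 -> level 3.1.
  std::printf("profile_idc=%u iop=0x%02X level=%u.%u\n", profile_idc,
              profile_iop, level_idc / 10, level_idc % 10);
  return 0;
}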


@ -32,7 +32,8 @@
// Struct that we pass to the decoder per frame to decode. We receive it again
// in the decoder callback.
struct RTCFrameDecodeParams {
RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts) : callback(cb), timestamp(ts) {}
RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts)
: callback(cb), timestamp(ts) {}
RTCVideoDecoderCallback callback;
int64_t timestamp;
};
@ -62,10 +63,11 @@ void decompressionOutputCallback(void *decoderRef,
// TODO(tkchin): Handle CVO properly.
RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:frameBuffer
rotation:RTCVideoRotation_0
timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
decodedFrame.timeStamp = decodeParams->timestamp;
decodeParams->callback(decodedFrame);
}
@ -112,8 +114,8 @@ void decompressionOutputCallback(void *decoderRef,
}
rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
inputImage.buffer.length));
rtc::ScopedCF(webrtc::CreateVideoFormatDescription(
(uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length));
if (inputFormat) {
// Check if the video format has changed, and reinitialize decoder if
// needed.
@ -135,29 +137,41 @@ void decompressionOutputCallback(void *decoderRef,
return WEBRTC_VIDEO_CODEC_ERROR;
}
CMSampleBufferRef sampleBuffer = nullptr;
if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes,
inputImage.buffer.length,
_videoFormat,
&sampleBuffer,
_memoryPool)) {
if (!webrtc::H264AnnexBBufferToCMSampleBuffer(
(uint8_t *)inputImage.buffer.bytes,
inputImage.buffer.length,
_videoFormat,
&sampleBuffer,
_memoryPool)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
RTC_DCHECK(sampleBuffer);
VTDecodeFrameFlags decodeFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
VTDecodeFrameFlags decodeFlags =
kVTDecodeFrame_EnableAsynchronousDecompression;
std::unique_ptr<RTCFrameDecodeParams> frameDecodeParams;
frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
OSStatus status = VTDecompressionSessionDecodeFrame(
_decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
frameDecodeParams.reset(
new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
OSStatus status =
VTDecompressionSessionDecodeFrame(_decompressionSession,
sampleBuffer,
decodeFlags,
frameDecodeParams.release(),
nullptr);
#if defined(WEBRTC_IOS)
// Re-initialize the decoder if we have an invalid session while the app is
// active or decoder malfunctions and retry the decode request.
if ((status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr) &&
if ((status == kVTInvalidSessionErr ||
status == kVTVideoDecoderMalfunctionErr) &&
[self resetDecompressionSession] == WEBRTC_VIDEO_CODEC_OK) {
RTC_LOG(LS_INFO) << "Failed to decode frame with code: " << status
<< " retrying decode after decompression session reset";
frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
status = VTDecompressionSessionDecodeFrame(
_decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
frameDecodeParams.reset(
new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
status = VTDecompressionSessionDecodeFrame(_decompressionSession,
sampleBuffer,
decodeFlags,
frameDecodeParams.release(),
nullptr);
}
#endif
CFRelease(sampleBuffer);
@ -214,19 +228,21 @@ void decompressionOutputCallback(void *decoderRef,
#if !(TARGET_OS_SIMULATOR)
(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
#endif
(NSString *)
kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
(NSString *)kCVPixelBufferPixelFormatTypeKey :
@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
};
VTDecompressionOutputCallbackRecord record = {
decompressionOutputCallback, (__bridge void *)self,
decompressionOutputCallback,
(__bridge void *)self,
};
OSStatus status = VTDecompressionSessionCreate(nullptr,
_videoFormat,
nullptr,
(__bridge CFDictionaryRef)attributes,
&record,
&_decompressionSession);
OSStatus status =
VTDecompressionSessionCreate(nullptr,
_videoFormat,
nullptr,
(__bridge CFDictionaryRef)attributes,
&record,
&_decompressionSession);
if (status != noErr) {
RTC_LOG(LS_ERROR) << "Failed to create decompression session: " << status;
[self destroyDecompressionSession];
@ -240,7 +256,9 @@ void decompressionOutputCallback(void *decoderRef,
- (void)configureDecompressionSession {
RTC_DCHECK(_decompressionSession);
#if defined(WEBRTC_IOS)
VTSessionSetProperty(_decompressionSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
VTSessionSetProperty(_decompressionSession,
kVTDecompressionPropertyKey_RealTime,
kCFBooleanTrue);
#endif
}


@ -41,10 +41,12 @@
@interface RTC_OBJC_TYPE (RTCVideoEncoderH264)
()
- (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer
- (void)frameWasEncoded : (OSStatus)status flags
: (VTEncodeInfoFlags)infoFlags sampleBuffer
: (CMSampleBufferRef)sampleBuffer codecSpecificInfo
: (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width : (int32_t)width height
: (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
: (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width
: (int32_t)width height : (int32_t)height renderTimeMs
: (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
: (RTCVideoRotation)rotation;
@end
@ -72,11 +74,17 @@ struct RTCFrameEncodeParams {
int64_t rtms,
uint32_t ts,
RTCVideoRotation r)
: encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
: encoder(e),
width(w),
height(h),
render_time_ms(rtms),
timestamp(ts),
rotation(r) {
if (csi) {
codecSpecificInfo = csi;
} else {
codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
codecSpecificInfo =
[[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
}
}
@ -92,21 +100,26 @@ struct RTCFrameEncodeParams {
// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
// encoder. This performs the copy and format conversion.
// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
CVPixelBufferRef pixelBuffer) {
bool CopyVideoFrameToNV12PixelBuffer(
id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
CVPixelBufferRef pixelBuffer) {
RTC_DCHECK(pixelBuffer);
RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);
RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0),
frameBuffer.height);
RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0),
frameBuffer.width);
CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
return false;
}
uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
uint8_t *dstY = reinterpret_cast<uint8_t *>(
CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
uint8_t *dstUV = reinterpret_cast<uint8_t *>(
CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
// Convert I420 to NV12.
int ret = libyuv::I420ToNV12(frameBuffer.dataY,
@ -129,7 +142,8 @@ bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffe
return true;
}
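The copy above repacks three I420 planes into NV12's two (Y plus one interleaved UV plane). The plane-size arithmetic, as a sketch (names illustrative; the actual conversion is libyuv::I420ToNV12):

#include <cstddef>
#include <cstdio>

int main() {
  const size_t w = 1280, h = 720;
  // I420: full-size Y plane plus quarter-size U and V planes.
  size_t i420_bytes = w * h + 2 * ((w / 2) * (h / 2));
  // NV12: full-size Y plane plus one half-height interleaved UV plane.
  size_t nv12_bytes = w * h + w * (h / 2);
  std::printf("i420=%zu nv12=%zu\n", i420_bytes, nv12_bytes);  // Both 1382400.
  return 0;
}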
CVPixelBufferRef CreatePixelBuffer(VTCompressionSessionRef compression_session) {
CVPixelBufferRef CreatePixelBuffer(
VTCompressionSessionRef compression_session) {
if (!compression_session) {
RTC_LOG(LS_ERROR) << "Failed to get compression session.";
return nullptr;
@ -142,7 +156,8 @@ CVPixelBufferRef CreatePixelBuffer(VTCompressionSessionRef compression_session)
return nullptr;
}
CVPixelBufferRef pixel_buffer;
CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(
nullptr, pixel_buffer_pool, &pixel_buffer);
if (ret != kCVReturnSuccess) {
RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
// We probably want to drop frames here, since failure probably means
@ -160,7 +175,8 @@ void compressionOutputCallback(void *encoder,
VTEncodeInfoFlags infoFlags,
CMSampleBufferRef sampleBuffer) {
if (!params) {
// If there are pending callbacks when the encoder is destroyed, this can happen.
// If there are pending callbacks when the encoder is destroyed, this can
// happen.
return;
}
std::unique_ptr<RTCFrameEncodeParams> encodeParams(
@ -282,9 +298,11 @@ CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) {
// The function returns the max allowed sample rate (pixels per second) that
// can be processed by given encoder with `profile_level_id`.
// See https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.264-201610-S!!PDF-E&type=items
// See
// https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.264-201610-S!!PDF-E&type=items
// for details.
NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) {
NSUInteger GetMaxSampleRate(
const webrtc::H264ProfileLevelId &profile_level_id) {
switch (profile_level_id.level) {
case webrtc::H264Level::kLevel3:
return 10368000;
@ -343,16 +361,18 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long periods
// of time.
- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
- (instancetype)initWithCodecInfo:
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
self = [super init];
if (self) {
_codecInfo = codecInfo;
_bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
_packetizationMode = RTCH264PacketizationModeNonInterleaved;
_profile_level_id =
webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters);
_profile_level_id = webrtc::ParseSdpForH264ProfileLevelId(
[codecInfo nativeSdpVideoFormat].parameters);
RTC_DCHECK(_profile_level_id);
RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id));
RTC_LOG(LS_INFO) << "Using profile "
<< CFStringToString(ExtractProfile(*_profile_level_id));
RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
}
return self;
@ -362,7 +382,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
[self destroyCompressionSession];
}
- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
- (NSInteger)startEncodeWithSettings:
(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
numberOfCores:(int)numberOfCores {
RTC_DCHECK(settings);
RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
@ -373,17 +394,19 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
uint32_t aligned_width = (((_width + 15) >> 4) << 4);
uint32_t aligned_height = (((_height + 15) >> 4) << 4);
_maxAllowedFrameRate = static_cast<uint32_t>(GetMaxSampleRate(*_profile_level_id) /
(aligned_width * aligned_height));
_maxAllowedFrameRate = static_cast<uint32_t>(
GetMaxSampleRate(*_profile_level_id) / (aligned_width * aligned_height));
// We can only set average bitrate on the HW encoder.
_targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps.
_bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
_encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate);
if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate
<< " is larger than the "
<< "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
if (settings.maxFramerate > _maxAllowedFrameRate &&
_maxAllowedFrameRate > 0) {
RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting "
<< settings.maxFramerate << " is larger than the "
<< "maximal allowed frame rate " << _maxAllowedFrameRate
<< ".";
}
// TODO(tkchin): Try setting payload size via
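The cap computed above rounds each dimension up to the 16-pixel macroblock grid, then divides the level's maximum sample rate (luma pixels per second) by the aligned area. A worked example using the level 3 figure from GetMaxSampleRate above:

#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t width = 854, height = 480;
  const uint32_t aligned_w = ((width + 15) >> 4) << 4;   // 854 -> 864.
  const uint32_t aligned_h = ((height + 15) >> 4) << 4;  // 480 -> 480.
  const uint32_t max_sample_rate = 10368000;  // H264Level::kLevel3.
  uint32_t max_fps = max_sample_rate / (aligned_w * aligned_h);
  std::printf("aligned %ux%u -> max %u fps\n", aligned_w, aligned_h, max_fps);
  // -> aligned 864x480 -> max 25 fps
  return 0;
}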
@ -393,7 +416,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
codecSpecificInfo:
(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
frameTypes:(NSArray<NSNumber *> *)frameTypes {
if (!_callback || !_compressionSession) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
@ -427,13 +451,15 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
int size =
[rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
[rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth
height:dstHeight];
_frameScaleBuffer.resize(size);
} else {
_frameScaleBuffer.clear();
}
_frameScaleBuffer.shrink_to_fit();
if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_frameScaleBuffer.data()]) {
if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer
withTempBuffer:_frameScaleBuffer.data()]) {
CVBufferRelease(pixelBuffer);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -467,7 +493,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
}
CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
CMTime presentationTimeStamp =
CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
CFDictionaryRef frameProperties = nullptr;
if (isKeyframeRequired) {
CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
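Frame timestamps arrive in nanoseconds, and the CMTimeMake(value, 1000) call above interprets its value in milliseconds (a timescale of 1000 ticks per second). A sketch of the conversion, assuming rtc::kNumNanosecsPerMillisec is 10^6:

#include <cstdint>
#include <cstdio>

int main() {
  const int64_t kNumNanosecsPerMillisec = 1000 * 1000;  // Assumed value.
  int64_t time_stamp_ns = 33'366'667;  // Roughly one frame at 30 fps.
  int64_t cmtime_value = time_stamp_ns / kNumNanosecsPerMillisec;  // 33 ms.
  std::printf("CMTimeMake(%lld, 1000) -> %.3f s\n",
              static_cast<long long>(cmtime_value), cmtime_value / 1000.0);
  return 0;
}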
@ -476,17 +503,19 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
std::unique_ptr<RTCFrameEncodeParams> encodeParams;
encodeParams.reset(new RTCFrameEncodeParams(self,
codecSpecificInfo,
_width,
_height,
frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
frame.timeStamp,
frame.rotation));
encodeParams.reset(
new RTCFrameEncodeParams(self,
codecSpecificInfo,
_width,
_height,
frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
frame.timeStamp,
frame.rotation));
encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;
// Update the bitrate if needed.
[self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate];
[self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()
frameRate:_encoderFrameRate];
OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
pixelBuffer,
@ -505,14 +534,16 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
if (status == kVTInvalidSessionErr) {
// This error occurs when entering foreground after backgrounding the app.
RTC_LOG(LS_ERROR) << "Invalid compression session, resetting.";
[self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
[self
resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
} else if (status == kVTVideoEncoderMalfunctionErr) {
// Sometimes the encoder malfunctions and needs to be restarted.
RTC_LOG(LS_ERROR)
<< "Encountered video encoder malfunction error. Resetting compression session.";
[self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
RTC_LOG(LS_ERROR) << "Encountered video encoder malfunction error. "
"Resetting compression session.";
[self
resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
} else if (status != noErr) {
@ -530,11 +561,14 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
_targetBitrateBps = 1000 * bitrateKbit;
_bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the "
<< "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate
<< " is larger than the "
<< "maximal allowed frame rate " << _maxAllowedFrameRate
<< ".";
}
framerate = MIN(framerate, _maxAllowedFrameRate);
[self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate];
[self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()
frameRate:framerate];
return WEBRTC_VIDEO_CODEC_OK;
}
@ -572,28 +606,31 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
return kNV12PixelFormat;
}
- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
- (BOOL)resetCompressionSessionIfNeededWithFrame:
(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
BOOL resetCompressionSession = NO;
// If we're capturing native frames in another pixel format than the compression session is
// configured with, make sure the compression session is reset using the correct pixel format.
// If we're capturing native frames in another pixel format than the
// compression session is configured with, make sure the compression session
// is reset using the correct pixel format.
OSType framePixelFormat = [self pixelFormatOfFrame:frame];
if (_compressionSession) {
// The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel
// formats or a single pixel format.
// The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either
// an array of pixel formats or a single pixel format.
CVPixelBufferPoolRef pixelBufferPool =
VTCompressionSessionGetPixelBufferPool(_compressionSession);
if (!pixelBufferPool) {
// If we have a compression session but can't acquire the pixel buffer pool, we're in an
// invalid state and should reset.
// If we have a compression session but can't acquire the pixel buffer
// pool, we're in an invalid state and should reset.
resetCompressionSession = YES;
} else {
NSDictionary *poolAttributes =
(__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(pixelBufferPool);
id pixelFormats =
[poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
(__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(
pixelBufferPool);
id pixelFormats = [poolAttributes
objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
NSArray<NSNumber *> *compressionSessionPixelFormats = nil;
if ([pixelFormats isKindOfClass:[NSArray class]]) {
compressionSessionPixelFormats = (NSArray *)pixelFormats;
@ -604,7 +641,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
if (![compressionSessionPixelFormats
containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
resetCompressionSession = YES;
RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching "
"pixel format.";
}
}
} else {
@ -639,7 +677,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
// Currently hw accl is supported above 360p on mac, below 360p
// the compression session will be created with hw accl disabled.
encoder_specs = @{
(NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES),
(NSString *)
kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES),
};
#endif
@ -648,7 +687,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
_width,
_height,
kCMVideoCodecType_H264,
(__bridge CFDictionaryRef)encoder_specs, // use hardware accelerated encoder if available
(__bridge CFDictionaryRef)
encoder_specs, // use hardware accelerated encoder if available
(__bridge CFDictionaryRef)sourceAttributes,
nullptr, // use default compressed data allocator
compressionOutputCallback,
@ -660,10 +700,11 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
CFBooleanRef hwaccl_enabled = nullptr;
status = VTSessionCopyProperty(_compressionSession,
kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
nullptr,
&hwaccl_enabled);
status = VTSessionCopyProperty(
_compressionSession,
kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
nullptr,
&hwaccl_enabled);
if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
} else {
@ -677,11 +718,14 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
- (void)configureCompressionSession {
RTC_DCHECK(_compressionSession);
SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
SetVTSessionProperty(
_compressionSession, kVTCompressionPropertyKey_RealTime, true);
SetVTSessionProperty(_compressionSession,
kVTCompressionPropertyKey_ProfileLevel,
ExtractProfile(*_profile_level_id));
SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
SetVTSessionProperty(_compressionSession,
kVTCompressionPropertyKey_AllowFrameReordering,
false);
[self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate];
// TODO(tkchin): Look at entropy mode and colorspace matrices.
// TODO(tkchin): Investigate to see if there's any way to make this work.
@ -691,10 +735,13 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
// kVTCompressionPropertyKey_MaxFrameDelayCount,
// 1);
// Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
// Set a relatively large value for keyframe emission (7200 frames or 4
// minutes).
SetVTSessionProperty(
_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
SetVTSessionProperty(_compressionSession,
kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,
240);
}
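A quick consistency check on the keyframe settings above: the 7200-frame cap and the 240-second cap describe the same four minutes at a nominal 30 fps:

#include <cstdio>

int main() {
  const int max_interval_frames = 7200;  // ..._MaxKeyFrameInterval.
  const int max_interval_seconds = 240;  // ..._MaxKeyFrameIntervalDuration.
  const int fps = 30;
  std::printf("%d frames / %d fps = %d s (duration cap: %d s)\n",
              max_interval_frames, fps, max_interval_frames / fps,
              max_interval_seconds);
  return 0;
}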
- (void)destroyCompressionSession {
@ -715,28 +762,37 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
}
- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate {
- (void)setEncoderBitrateBps:(uint32_t)bitrateBps
frameRate:(uint32_t)frameRate {
if (_compressionSession) {
SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);
SetVTSessionProperty(_compressionSession,
kVTCompressionPropertyKey_AverageBitRate,
bitrateBps);
// With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection.
// With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate
// detection.
if (_maxAllowedFrameRate > 0) {
SetVTSessionProperty(
_compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate);
SetVTSessionProperty(_compressionSession,
kVTCompressionPropertyKey_ExpectedFrameRate,
frameRate);
}
// TODO(tkchin): Add a helper method to set array value.
int64_t dataLimitBytesPerSecondValue =
static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
CFNumberRef bytesPerSecond =
CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
CFNumberRef bytesPerSecond = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,
&dataLimitBytesPerSecondValue);
int64_t oneSecondValue = 1;
CFNumberRef oneSecond =
CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
CFNumberRef oneSecond = CFNumberCreate(
kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
const void *nums[2] = {bytesPerSecond, oneSecond};
CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
OSStatus status = VTSessionSetProperty(
_compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
CFArrayRef dataRateLimits =
CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
OSStatus status =
VTSessionSetProperty(_compressionSession,
kVTCompressionPropertyKey_DataRateLimits,
dataRateLimits);
if (bytesPerSecond) {
CFRelease(bytesPerSecond);
}
@ -747,7 +803,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
CFRelease(dataRateLimits);
}
if (status != noErr) {
RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status;
RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: "
<< status;
}
_encoderBitrateBps = bitrateBps;
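VideoToolbox expresses the data rate limit above as (bytes, seconds) pairs. The bytes-per-second figure is the target bitrate times a headroom factor, divided by 8 to convert bits to bytes; the 1.5 below is an assumed factor for illustration, since kLimitToAverageBitRateFactor's value is defined elsewhere in this file:

#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t bitrate_bps = 1'000'000;           // 1 Mbps target.
  const double kLimitToAverageBitRateFactor = 1.5;  // Assumed value.
  int64_t bytes_per_second =
      static_cast<int64_t>(bitrate_bps * kLimitToAverageBitRateFactor / 8);
  std::printf("limit: %lld bytes over a 1 second window\n",
              static_cast<long long>(bytes_per_second));  // 187500.
  return 0;
}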
@ -758,7 +815,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
- (void)frameWasEncoded:(OSStatus)status
flags:(VTEncodeInfoFlags)infoFlags
sampleBuffer:(CMSampleBufferRef)sampleBuffer
codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
codecSpecificInfo:
(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
width:(int32_t)width
height:(int32_t)height
renderTimeMs:(int64_t)renderTimeMs
@ -778,11 +836,13 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
BOOL isKeyframe = NO;
CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
CFArrayRef attachments =
CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
if (attachments != nullptr && CFArrayGetCount(attachments)) {
CFDictionaryRef attachment =
static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
isKeyframe =
!CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
}
if (isKeyframe) {
@ -790,25 +850,31 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
}
__block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
if (!webrtc::H264CMSampleBufferToAnnexBBuffer(
sampleBuffer, isKeyframe, buffer.get())) {
return;
}
RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
// This assumes ownership of `buffer` and is responsible for freeing it when done.
frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
length:buffer->size()
deallocator:^(void *bytes, NSUInteger size) {
buffer.reset();
}];
RTC_OBJC_TYPE(RTCEncodedImage) *frame =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
// This assumes ownership of `buffer` and is responsible for freeing it when
// done.
frame.buffer =
[[NSData alloc] initWithBytesNoCopy:buffer->data()
length:buffer->size()
deallocator:^(void *bytes, NSUInteger size) {
buffer.reset();
}];
frame.encodedWidth = width;
frame.encodedHeight = height;
frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
frame.frameType =
isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
frame.captureTimeMs = renderTimeMs;
frame.timeStamp = timestamp;
frame.rotation = rotation;
frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ?
RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
frame.flags = webrtc::VideoSendTiming::kInvalid;
_h264BitstreamParser.ParseBitstream(*buffer);


@ -53,7 +53,8 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{"iPhone13,1", /* https://support.apple.com/kb/SP829 iPhone 12 mini */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPhone12,8", /* https://support.apple.com/kb/SP820 iPhone SE (2nd generation) */
{"iPhone12,8", /* https://support.apple.com/kb/SP820 iPhone SE (2nd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPhone12,5", /* https://support.apple.com/kb/SP806 iPhone 11 Pro Max */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
@ -111,52 +112,74 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
// iPods with at least iOS 12
{"iPod9,1", /* https://support.apple.com/kb/SP796 iPod touch (7th generation) */
{"iPod9,1", /* https://support.apple.com/kb/SP796 iPod touch (7th
generation) */
{H264Profile::kProfileMain, H264Level::kLevel4_1}},
{"iPod7,1", /* https://support.apple.com/kb/SP720 iPod touch (6th generation) */
{"iPod7,1", /* https://support.apple.com/kb/SP720 iPod touch (6th
generation) */
{H264Profile::kProfileMain, H264Level::kLevel4_1}},
// iPads with at least iOS 12
{"iPad14,6", /* https://support.apple.com/kb/SP883 iPad Pro 12.9-inch (6th generation) */
{"iPad14,6", /* https://support.apple.com/kb/SP883 iPad Pro 12.9-inch (6th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad14,5", /* https://support.apple.com/kb/SP883 iPad Pro 12.9-inch (6th generation) */
{"iPad14,5", /* https://support.apple.com/kb/SP883 iPad Pro 12.9-inch (6th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad14,4", /* https://support.apple.com/kb/SP882 iPad Pro 11-inch (4th generation) */
{"iPad14,4", /* https://support.apple.com/kb/SP882 iPad Pro 11-inch (4th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad14,3", /* https://support.apple.com/kb/SP882 iPad Pro 11-inch (4th generation) */
{"iPad14,3", /* https://support.apple.com/kb/SP882 iPad Pro 11-inch (4th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad14,2", /* https://support.apple.com/kb/SP850 iPad mini (6th generation) */
{"iPad14,2", /* https://support.apple.com/kb/SP850 iPad mini (6th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad14,1", /* https://support.apple.com/kb/SP850 iPad mini (6th generation) */
{"iPad14,1", /* https://support.apple.com/kb/SP850 iPad mini (6th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,19", /* https://support.apple.com/kb/SP884 iPad (10th generation) */
{"iPad13,19", /* https://support.apple.com/kb/SP884 iPad (10th generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,18", /* https://support.apple.com/kb/SP884 iPad (10th generation) */
{"iPad13,18", /* https://support.apple.com/kb/SP884 iPad (10th generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,17", /* https://support.apple.com/kb/SP866 iPad Air (5th generation) */
{"iPad13,17", /* https://support.apple.com/kb/SP866 iPad Air (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,16", /* https://support.apple.com/kb/SP866 iPad Air (5th generation) */
{"iPad13,16", /* https://support.apple.com/kb/SP866 iPad Air (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,11", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th generation) */
{"iPad13,11", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,10", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th generation) */
{"iPad13,10", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,9", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th generation) */
{"iPad13,9", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,8", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th generation) */
{"iPad13,8", /* https://support.apple.com/kb/SP844 iPad Pro, 12.9-inch (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,7", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd generation) */
{"iPad13,7", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,6", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd generation) */
{"iPad13,6", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,5", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd generation) */
{"iPad13,5", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,4", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd generation) */
{"iPad13,4", /* https://support.apple.com/kb/SP843 iPad Pro, 11-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,2", /* https://support.apple.com/kb/SP828 iPad Air (4th generation) */
{"iPad13,2", /* https://support.apple.com/kb/SP828 iPad Air (4th generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad13,1", /* https://support.apple.com/kb/SP828 iPad Air (4th generation) */
{"iPad13,1", /* https://support.apple.com/kb/SP828 iPad Air (4th generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad12,2", /* https://support.apple.com/kb/SP849 iPad (9th generation) */
@ -168,38 +191,54 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad11,6", /* https://support.apple.com/kb/SP822 iPad (8th generation) */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad11,4", /* https://support.apple.com/kb/SP787 iPad Air (3rd generation) */
{"iPad11,4", /* https://support.apple.com/kb/SP787 iPad Air (3rd generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad11,3", /* https://support.apple.com/kb/SP787 iPad Air (3rd generation) */
{"iPad11,3", /* https://support.apple.com/kb/SP787 iPad Air (3rd generation)
*/
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad11,2", /* https://support.apple.com/kb/SP788 iPad mini (5th generation) */
{"iPad11,2", /* https://support.apple.com/kb/SP788 iPad mini (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad11,1", /* https://support.apple.com/kb/SP788 iPad mini (5th generation) */
{"iPad11,1", /* https://support.apple.com/kb/SP788 iPad mini (5th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad8,12", /* https://support.apple.com/kb/SP815 iPad Pro 12.9-inch (4th generation) */
{"iPad8,12", /* https://support.apple.com/kb/SP815 iPad Pro 12.9-inch (4th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,11", /* https://support.apple.com/kb/SP815 iPad Pro 12.9-inch (4th generation) */
{"iPad8,11", /* https://support.apple.com/kb/SP815 iPad Pro 12.9-inch (4th
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,10", /* https://support.apple.com/kb/SP814 iPad Pro 11-inch (2nd generation) */
{"iPad8,10", /* https://support.apple.com/kb/SP814 iPad Pro 11-inch (2nd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,9", /* https://support.apple.com/kb/SP814 iPad Pro 11-inch (2nd generation) */
{"iPad8,9", /* https://support.apple.com/kb/SP814 iPad Pro 11-inch (2nd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,8", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd generation) */
{"iPad8,8", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,7", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd generation) */
{"iPad8,7", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,6", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd generation) */
{"iPad8,6", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,5", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd generation) */
{"iPad8,5", /* https://support.apple.com/kb/SP785 iPad Pro 12.9-inch (3rd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,4", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st generation) */
{"iPad8,4", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,3", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st generation) */
{"iPad8,3", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,2", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st generation) */
{"iPad8,2", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad8,1", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st generation) */
{"iPad8,1", /* https://support.apple.com/kb/SP784 iPad Pro 11-inch (1st
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_2}},
{"iPad7,12", /* https://support.apple.com/kb/SP807 iPad (7th generation) */
@ -214,9 +253,11 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{H264Profile::kProfileHigh, H264Level::kLevel5_1}},
{"iPad7,3", /* https://support.apple.com/kb/SP762 iPad Pro (10.5-inch) */
{H264Profile::kProfileHigh, H264Level::kLevel5_1}},
{"iPad7,2", /* https://support.apple.com/kb/SP761 iPad Pro (12.9-inch) (2nd generation) */
{"iPad7,2", /* https://support.apple.com/kb/SP761 iPad Pro (12.9-inch) (2nd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_1}},
{"iPad7,1", /* https://support.apple.com/kb/SP761 iPad Pro (12.9-inch) (2nd generation) */
{"iPad7,1", /* https://support.apple.com/kb/SP761 iPad Pro (12.9-inch) (2nd
generation) */
{H264Profile::kProfileHigh, H264Level::kLevel5_1}},
{"iPad6,12", /* https://support.apple.com/kb/SP751 iPad (5th generation) */
@ -247,11 +288,14 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad4,7", /* https://support.apple.com/kb/sp709 iPad mini 3 */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad4,6", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina display */
{"iPad4,6", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina
display */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad4,5", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina display */
{"iPad4,5", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina
display */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad4,4", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina display */
{"iPad4,4", /* https://support.apple.com/kb/sp693 iPad mini 2 with Retina
display */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
{"iPad4,3", /* https://support.apple.com/kb/SP692 iPad Air */
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
@ -261,22 +305,25 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
{H264Profile::kProfileHigh, H264Level::kLevel4_1}},
};
std::optional<H264ProfileLevelId> FindMaxSupportedProfileForDevice(NSString* machineName) {
const auto* result =
std::find_if(std::begin(kH264MaxSupportedProfiles),
std::end(kH264MaxSupportedProfiles),
[machineName](const SupportedH264Profile& supportedProfile) {
return [machineName isEqualToString:@(supportedProfile.machineName)];
});
std::optional<H264ProfileLevelId> FindMaxSupportedProfileForDevice(
NSString* machineName) {
const auto* result = std::find_if(
std::begin(kH264MaxSupportedProfiles),
std::end(kH264MaxSupportedProfiles),
[machineName](const SupportedH264Profile& supportedProfile) {
return [machineName isEqualToString:@(supportedProfile.machineName)];
});
if (result != std::end(kH264MaxSupportedProfiles)) {
return result->profile;
}
if ([machineName hasPrefix:@"iPhone"] || [machineName hasPrefix:@"iPad"]) {
H264ProfileLevelId fallbackProfile{H264Profile::kProfileHigh, H264Level::kLevel4_1};
H264ProfileLevelId fallbackProfile{H264Profile::kProfileHigh,
H264Level::kLevel4_1};
return fallbackProfile;
}
if ([machineName hasPrefix:@"iPod"]) {
H264ProfileLevelId fallbackProfile{H264Profile::kProfileMain, H264Level::kLevel4_1};
H264ProfileLevelId fallbackProfile{H264Profile::kProfileMain,
H264Level::kLevel4_1};
return fallbackProfile;
}
return std::nullopt;
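For orientation, a minimal sketch of how this lookup might be driven by a caller. The sysctl-based machine-name retrieval below is an assumption for illustration and is not part of this CL; "hw.machine" is the standard sysctl key that reports identifiers like "iPad13,16" on iOS hardware.

// Hypothetical caller sketch. Assumes <sys/sysctl.h> and <optional> are
// included and that this file's H264ProfileLevelId types are visible.
static std::optional<H264ProfileLevelId> MaxProfileForCurrentDevice() {
  char machine[64] = {0};
  size_t size = sizeof(machine);
  // "hw.machine" yields the device model identifier, e.g. "iPad13,16".
  if (sysctlbyname("hw.machine", machine, &size, NULL, 0) != 0) {
    return std::nullopt;
  }
  // @(machine) boxes the C string into an NSString.
  return FindMaxSupportedProfileForDevice(@(machine));
}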
View File
@ -40,9 +40,9 @@ RTC_OBJC_EXPORT
- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
/** The minimum size of the `tmpBuffer` must be the number of bytes returned from the
* bufferSizeForCroppingAndScalingToWidth:height: method.
* If that size is 0, the `tmpBuffer` may be nil.
/** The minimum size of the `tmpBuffer` must be the number of bytes returned
* from the bufferSizeForCroppingAndScalingToWidth:height: method. If that size
* is 0, the `tmpBuffer` may be nil.
*/
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
withTempBuffer:(nullable uint8_t *)tmpBuffer;
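A minimal sketch of the call pattern this contract implies; the variable names (rtcBuffer, out, dstWidth, dstHeight) are illustrative assumptions, not part of the API:

// Hypothetical caller: query the scratch size first, then crop/scale.
// Assumes <vector> plus an existing RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcBuffer
// and a CVPixelBufferRef out of the target dimensions.
int tmpSize = [rtcBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth
                                                         height:dstHeight];
std::vector<uint8_t> tmp(tmpSize);  // Empty when no scratch space is needed.
BOOL ok = [rtcBuffer cropAndScaleTo:out
                     withTempBuffer:tmp.empty() ? nullptr : tmp.data()];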
View File
@ -38,11 +38,12 @@
@synthesize cropHeight = _cropHeight;
+ (NSSet<NSNumber*>*)supportedPixelFormats {
return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
@(kCVPixelFormatType_32BGRA),
@(kCVPixelFormatType_32ARGB),
nil];
return
[NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
@(kCVPixelFormatType_32BGRA),
@(kCVPixelFormatType_32ARGB),
nil];
}
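As a usage note, callers can gate incoming frames on this set before wrapping them in an RTCCVPixelBuffer; a minimal sketch, where pixelBuffer is an assumed local:

// Hypothetical pre-flight check before wrapping a CVPixelBufferRef.
OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);
BOOL supported = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats]
    containsObject:@(format)];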
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
@ -110,7 +111,8 @@
int dstChromaWidth = (width + 1) / 2;
int dstChromaHeight = (height + 1) / 2;
return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
return srcChromaWidth * srcChromaHeight * 2 +
dstChromaWidth * dstChromaHeight * 2;
}
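A worked instance of the formula above, assuming the crop region covers the full frame: scaling 1920x1080 NV12 down to 1280x720 gives a source chroma plane of 960x540 and a destination chroma plane of 640x360, so the scratch requirement is 960 * 540 * 2 + 640 * 360 * 2 = 1,497,600 bytes, roughly 1.4 MB.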
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_32ARGB: {
@ -124,7 +126,8 @@
- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
withTempBuffer:(nullable uint8_t*)tmpBuffer {
const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
const OSType dstPixelFormat =
CVPixelBufferGetPixelFormatType(outputPixelBuffer);
switch (srcPixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
@ -132,8 +135,9 @@
size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
if (dstWidth > 0 && dstHeight > 0) {
RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
RTC_DCHECK(
dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
RTC_DCHECK(tmpBuffer);
}
@ -176,17 +180,20 @@
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer =
[[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]];
[[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width]
height:[self height]];
switch (pixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
const uint8_t* srcY =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
const uint8_t* srcY = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride =
CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride =
CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
srcY += srcYStride * _cropY + _cropX;
@ -215,17 +222,24 @@
CVPixelBufferRef scaledPixelBuffer = NULL;
CVPixelBufferRef sourcePixelBuffer = NULL;
if ([self requiresCropping] ||
[self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
CVPixelBufferCreate(
NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
[self requiresScalingToWidth:i420Buffer.width
height:i420Buffer.height]) {
CVPixelBufferCreate(NULL,
i420Buffer.width,
i420Buffer.height,
pixelFormat,
NULL,
&scaledPixelBuffer);
[self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferLockBaseAddress(scaledPixelBuffer,
kCVPixelBufferLock_ReadOnly);
sourcePixelBuffer = scaledPixelBuffer;
} else {
sourcePixelBuffer = _pixelBuffer;
}
const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
const uint8_t* src =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA) {
@ -255,7 +269,8 @@
}
if (scaledPixelBuffer) {
CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(scaledPixelBuffer,
kCVPixelBufferLock_ReadOnly);
CVBufferRelease(scaledPixelBuffer);
}
break;
@ -276,7 +291,9 @@
- (id)debugQuickLookObject {
CGImageRef cgImage;
VTCreateCGImageFromCVPixelBuffer(_pixelBuffer, NULL, &cgImage);
UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
UIImage* image = [UIImage imageWithCGImage:cgImage
scale:1.0
orientation:UIImageOrientationUp];
CGImageRelease(cgImage);
return image;
}
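(Note: debugQuickLookObject is the hook Xcode's debugger calls to render a Quick Look preview of an instance, so this CGImage/UIImage round-trip only runs during interactive debugging; application code never invokes it directly.)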
@ -284,7 +301,8 @@
#pragma mark - Private
- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer
withTempBuffer:(uint8_t*)tmpBuffer {
// Prepare output pointers.
CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
@ -292,18 +310,22 @@
}
const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
uint8_t* dstY =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
uint8_t* dstUV =
reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
uint8_t* dstY = reinterpret_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
const int dstYStride =
CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
uint8_t* dstUV = reinterpret_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
const int dstUVStride =
CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
// Prepare source pointers.
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const uint8_t* srcY = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const uint8_t* srcUV = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
// Crop just by modifying pointers.
@ -337,16 +359,19 @@
const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
uint8_t* dst = reinterpret_cast<uint8_t*>(
CVPixelBufferGetBaseAddress(outputPixelBuffer));
const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
// Prepare source pointers.
CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
const uint8_t* src =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);
// Crop just by modifying pointers. Need to ensure that src pointer points to a byte corresponding
// to the start of a new pixel (byte with B for BGRA) so that libyuv scales correctly.
// Crop just by modifying pointers. Need to ensure that src pointer points to
// a byte corresponding to the start of a new pixel (byte with B for BGRA) so
// that libyuv scales correctly.
const int bytesPerPixel = 4;
src += srcStride * _cropY + (_cropX * bytesPerPixel);
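As a worked instance of that pointer math, with illustrative numbers: for srcStride = 5120 (a 1280-pixel-wide BGRA frame at 4 bytes per pixel), _cropY = 10 and _cropX = 16, src advances by 5120 * 10 + 16 * 4 = 51,264 bytes, which lands exactly on the B byte of the first pixel of the crop region, as the comment requires.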