Refactor WebRTC self assignments in if clauses
This change refactors existing self-assignments within if clauses across the WebRTC codebase.

Why:
- Bug Prevention: Assignments within conditionals are frequently unintended errors, often mistaken for equality checks.
- Clearer Code: Separating assignments from conditionals improves code readability and reduces the risk of misinterpretation.

Change-Id: I199dc26a35ceca109a2ac569b446811314dfdf0b
Bug: chromium:361594695
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/360460
Reviewed-by: Chuck Hays <haysc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Commit-Queue: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#42850}
This commit is contained in:
parent 9e8652853e
commit ab009c27b4
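For reference, the pattern being replaced looks like the following. This is an illustrative sketch only; the -initWithFoo: initializer and the _foo ivar are hypothetical and do not appear in the diff below.

// Before: the assignment to self happens inside the if condition, which is
// easy to misread as a comparison.
- (instancetype)initWithFoo:(NSObject *)foo {
  if (self = [super init]) {
    _foo = foo;
  }
  return self;
}

// After: the assignment is its own statement, followed by a plain nil check.
- (instancetype)initWithFoo:(NSObject *)foo {
  self = [super init];
  if (self) {
    _foo = foo;
  }
  return self;
}

Both versions behave identically; splitting the statements only removes the assignment-versus-comparison ambiguity that the commit message cites as the motivation.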
@@ -84,7 +84,8 @@ static int const kKbpsMultiplier = 1000;
 repeats:(BOOL)repeats
 timerHandler:(void (^)(void))timerHandler {
 NSParameterAssert(timerHandler);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _timerHandler = timerHandler;
 _timer = [NSTimer scheduledTimerWithTimeInterval:interval
 target:self
@@ -140,7 +141,8 @@ static int const kKbpsMultiplier = 1000;
 }

 - (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _roomServerClient = [[ARDAppEngineClient alloc] init];
 _delegate = delegate;
 NSURL *turnRequestURL = [NSURL URLWithString:kARDIceServerRequestUrl];
@@ -160,7 +162,8 @@ static int const kKbpsMultiplier = 1000;
 NSParameterAssert(rsClient);
 NSParameterAssert(channel);
 NSParameterAssert(turnClient);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _roomServerClient = rsClient;
 _channel = channel;
 _turnClient = turnClient;
@@ -24,12 +24,12 @@ const Float64 kFramerateLimit = 30.0;

 - (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
 settings:(ARDSettingsModel *)settings {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _capturer = capturer;
 _settings = settings;
 _usingFrontCamera = YES;
 }
-
 return self;
 }

@@ -24,7 +24,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 @synthesize type = _type;

 - (instancetype)initWithType:(ARDSignalingMessageType)type {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _type = type;
 }
 return self;
@@ -79,7 +80,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 @synthesize candidate = _candidate;

 - (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
-if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
+self = [super initWithType:kARDSignalingMessageTypeCandidate];
+if (self) {
 _candidate = candidate;
 }
 return self;
@@ -97,7 +99,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";

 - (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
 NSParameterAssert(candidates.count);
-if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
+self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval];
+if (self) {
 _candidates = candidates;
 }
 return self;
@@ -130,7 +133,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
 break;
 }
-if (self = [super initWithType:messageType]) {
+self = [super initWithType:messageType];
+if (self) {
 _sessionDescription = description;
 }
 return self;
@@ -24,7 +24,8 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;

 - (instancetype)initWithURL:(NSURL *)url {
 NSParameterAssert([url absoluteString].length);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _url = url;
 }
 return self;
@@ -38,7 +38,8 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
 - (instancetype)initWithURL:(NSURL *)url
 restURL:(NSURL *)restURL
 delegate:(id<ARDSignalingChannelDelegate>)delegate {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _url = url;
 _restURL = restURL;
 _delegate = delegate;
@@ -22,7 +22,8 @@
 @synthesize fileCapturer = _fileCapturer;

 - (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _fileCapturer = capturer;
 }
 return self;
@@ -26,7 +26,8 @@ static CGFloat const kCallControlMargin = 8;
 }

 - (instancetype)initWithFrame:(CGRect)frame {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
 _roomText.borderStyle = UITextBorderStyleNone;
 _roomText.font = [UIFont systemFontOfSize:12];
@@ -82,7 +83,8 @@ static CGFloat const kCallControlMargin = 8;
 @synthesize isAudioLoopPlaying = _isAudioLoopPlaying;

 - (instancetype)initWithFrame:(CGRect)frame {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
 [self addSubview:_roomText];

@@ -20,7 +20,8 @@
 }

 - (instancetype)initWithFrame:(CGRect)frame {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 _statsLabel = [[UILabel alloc] initWithFrame:CGRectZero];
 _statsLabel.numberOfLines = 0;
 _statsLabel.font = [UIFont fontWithName:@"Roboto" size:12];
@@ -39,8 +39,8 @@ static CGFloat const kStatusBarHeight = 20;
 @synthesize delegate = _delegate;

 - (instancetype)initWithFrame:(CGRect)frame {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];

 [self addSubview:_remoteVideoView];
@@ -45,7 +45,8 @@
 - (instancetype)initForRoom:(NSString *)room
 isLoopback:(BOOL)isLoopback
 delegate:(id<ARDVideoCallViewControllerDelegate>)delegate {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
 _delegate = delegate;

@@ -26,7 +26,8 @@
 @synthesize capturer = _capturer;

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
 os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
 [_callbackLogger start:^(NSString *logMessage) {
@@ -72,7 +72,8 @@ static NSUInteger const kBottomViewHeight = 200;
 #pragma mark - Private

 - (instancetype)initWithFrame:(NSRect)frame {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 [self setupViews];
 }
 return self;
@@ -370,7 +370,8 @@ std::unique_ptr<DesktopCapturer> CreateScreenCapturerSck(const DesktopCaptureOpt
 }

 - (instancetype)initWithCapturer:(webrtc::ScreenCapturerSck*)capturer {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _capturer = capturer;
 }
 return self;
@@ -53,7 +53,8 @@ class VideoRendererAdapter

 - (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer {
 NSParameterAssert(videoRenderer);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _videoRenderer = videoRenderer;
 _adapter.reset(new webrtc::VideoRendererAdapter(self));
 }
@@ -24,9 +24,10 @@
 RTC_DCHECK(factory);
 RTC_DCHECK(nativeAudioSource);

-if (self = [super initWithFactory:factory
-nativeMediaSource:nativeAudioSource
-type:RTCMediaSourceTypeAudio]) {
+self = [super initWithFactory:factory
+nativeMediaSource:nativeAudioSource
+type:RTCMediaSourceTypeAudio];
+if (self) {
 _nativeAudioSource = nativeAudioSource;
 }
 return self;
@@ -28,7 +28,8 @@
 }

 - (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString *)certificate {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _private_key = [private_key copy];
 _certificate = [certificate copy];
 }
@@ -72,7 +72,8 @@

 - (instancetype)initWithNativeConfiguration:
 (const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _enableDscp = config.dscp();
 NSMutableArray *iceServers = [NSMutableArray array];
 for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
@@ -21,7 +21,8 @@
 srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher
 srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions
 sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _srtpEnableGcmCryptoSuites = srtpEnableGcmCryptoSuites;
 _srtpEnableAes128Sha1_32CryptoCipher = srtpEnableAes128Sha1_32CryptoCipher;
 _srtpEnableEncryptedRtpHeaderExtensions = srtpEnableEncryptedRtpHeaderExtensions;
@@ -50,7 +50,8 @@ class DataChannelDelegateAdapter : public DataChannelObserver {

 - (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
 NSParameterAssert(data);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 rtc::CopyOnWriteBuffer buffer(
 reinterpret_cast<const uint8_t*>(data.bytes), data.length);
 _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
@@ -70,7 +71,8 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
 #pragma mark - Private

 - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
 }
 return self;
@@ -167,7 +169,8 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
 nativeDataChannel:
 (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
 NSParameterAssert(nativeDataChannel);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeDataChannel = nativeDataChannel;
 _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
@@ -64,7 +64,8 @@
 - (instancetype)initWithNativeDtmfSender:
 (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
 NSParameterAssert(nativeDtmfSender);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _nativeDtmfSender = nativeDtmfSender;
 RTCLogInfo(
 @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description);
@@ -73,7 +73,8 @@ class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
 }

 - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 // A reference to the encodedData must be stored so that it's kept alive as long
 // self.buffer references its underlying data.
 self.encodedData = encodedImage.GetEncodedData();
@@ -54,7 +54,8 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
 rotationType:(RTCFileLoggerRotationType)rotationType {
 NSParameterAssert(dirPath.length);
 NSParameterAssert(maxFileSize);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 BOOL isDir = NO;
 NSFileManager *fileManager = [NSFileManager defaultManager];
 if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
@@ -26,7 +26,8 @@
 sdpMLineIndex:(int)sdpMLineIndex
 sdpMid:(NSString *)sdpMid {
 NSParameterAssert(sdp.length);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _sdpMid = [sdpMid copy];
 _sdpMLineIndex = sdpMLineIndex;
 _sdp = [sdp copy];
@@ -84,7 +84,8 @@
 tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
 tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
 NSParameterAssert(urlStrings.count);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
 _username = [username copy];
 _credential = [credential copy];
@@ -33,7 +33,8 @@
 #pragma mark - Private

 - (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _timestamp = nativeReport.timestamp();
 _type = [NSString stringForStdString:nativeReport.TypeToString()];
 _reportId = [NSString stringForStdString:
@@ -37,7 +37,8 @@ NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kVa
 (NSDictionary<NSString *, NSString *> *)mandatory
 optionalConstraints:
 (NSDictionary<NSString *, NSString *> *)optional {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
 copyItems:YES];
 _optional = [[NSDictionary alloc] initWithDictionary:optional
@@ -24,7 +24,8 @@
 type:(RTCMediaSourceType)type {
 RTC_DCHECK(factory);
 RTC_DCHECK(nativeMediaSource);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeMediaSource = nativeMediaSource;
 _type = type;
@@ -120,7 +120,8 @@
 nativeMediaStream:
 (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
 NSParameterAssert(nativeMediaStream);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _signalingThread = factory.signalingThread;

@@ -81,7 +81,8 @@ NSString * const kRTCMediaStreamTrackKindVideo =
 type:(RTCMediaStreamTrackType)type {
 NSParameterAssert(nativeTrack);
 NSParameterAssert(factory);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeTrack = nativeTrack;
 _type = type;
@@ -24,7 +24,8 @@

 - (instancetype)initWithNativeSampleInfo:
 (const webrtc::metrics::SampleInfo &)info {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _name = [NSString stringForStdString:info.name];
 _min = info.min;
 _max = info.max;
@@ -365,7 +365,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
 if (!config) {
 return nil;
 }
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
 _nativeConstraints = constraints.nativeConstraints;
 CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(), config.get());
@@ -119,7 +119,8 @@
 }

 - (instancetype)initNative {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _networkThread = rtc::Thread::CreateWithSocketServer();
 _networkThread->SetName("network_thread", _networkThread.get());
 BOOL result = _networkThread->Start();
@@ -23,7 +23,8 @@
 }

 - (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _cname = [NSString stringForStdString:nativeParameters.cname];
 _isReducedSize = nativeParameters.reduced_size;
 }
@@ -28,7 +28,8 @@

 - (instancetype)initWithNativeRtpCapabilities:
 (const webrtc::RtpCapabilities &)nativeRtpCapabilities {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 NSMutableArray *codecs = [[NSMutableArray alloc] init];
 for (const auto &codec : nativeRtpCapabilities.codecs) {
 [codecs addObject:[[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc]
@@ -33,7 +33,8 @@

 - (instancetype)initWithNativeRtpCodecCapability:
 (const webrtc::RtpCodecCapability &)nativeRtpCodecCapability {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 if (nativeRtpCodecCapability.preferred_payload_type) {
 _preferredPayloadType =
 [NSNumber numberWithInt:*nativeRtpCodecCapability.preferred_payload_type];
@@ -49,7 +49,8 @@ const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);

 - (instancetype)initWithNativeParameters:
 (const webrtc::RtpCodecParameters &)nativeParameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _payloadType = nativeParameters.payload_type;
 _name = [NSString stringForStdString:nativeParameters.name];
 switch (nativeParameters.kind) {
@@ -33,7 +33,8 @@

 - (instancetype)initWithNativeParameters:
 (const webrtc::RtpEncodingParameters &)nativeParameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 if (!nativeParameters.rid.empty()) {
 _rid = [NSString stringForStdString:nativeParameters.rid];
 }
@@ -24,7 +24,8 @@
 }

 - (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _uri = [NSString stringForStdString:nativeParameters.uri];
 _id = nativeParameters.id;
 _encrypted = nativeParameters.encrypt;
@@ -25,7 +25,8 @@

 - (instancetype)initWithNativeRtpHeaderExtensionCapability:
 (const webrtc::RtpHeaderExtensionCapability &)nativeRtpHeaderExtensionCapability {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _uri = [NSString stringForStdString:nativeRtpHeaderExtensionCapability.uri];
 if (nativeRtpHeaderExtensionCapability.preferred_id) {
 _preferredId = [NSNumber numberWithInt:*nativeRtpHeaderExtensionCapability.preferred_id];
@@ -32,7 +32,8 @@

 - (instancetype)initWithNativeParameters:
 (const webrtc::RtpParameters &)nativeParameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
 _rtcp =
 [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp];
@@ -117,7 +117,8 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
 - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeRtpReceiver:
 (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeRtpReceiver = nativeRtpReceiver;
 RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description);
@@ -113,7 +113,8 @@
 nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
 NSParameterAssert(factory);
 NSParameterAssert(nativeRtpSender);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeRtpSender = nativeRtpSender;
 if (_nativeRtpSender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
@@ -65,7 +65,8 @@
 }

 - (instancetype)initWithNativeRtpSource:(const webrtc::RtpSource &)nativeRtpSource {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _nativeRtpSource = nativeRtpSource;
 }
 return self;
@@ -29,7 +29,8 @@ NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver";
 @synthesize sendEncodings = _sendEncodings;

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _direction = RTCRtpTransceiverDirectionSendRecv;
 }
 return self;
@@ -166,7 +167,8 @@ NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver";
 (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
 NSParameterAssert(factory);
 NSParameterAssert(nativeRtpTransceiver);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _factory = factory;
 _nativeRtpTransceiver = nativeRtpTransceiver;
 _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory
@@ -31,7 +31,8 @@
 }

 - (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _type = type;
 _sdp = [sdp copy];
 }
@@ -114,7 +114,8 @@ NSObject *ValueFromStatsAttribute(const Attribute &attribute) {
 @synthesize values = _values;

 - (instancetype)initWithStatistics:(const webrtc::RTCStats &)statistics {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _id = [NSString stringForStdString:statistics.id()];
 _timestamp_us = statistics.timestamp().us();
 _type = [NSString stringWithCString:statistics.type() encoding:NSUTF8StringEncoding];
@@ -161,7 +162,8 @@ NSObject *ValueFromStatsAttribute(const Attribute &attribute) {
 @implementation RTC_OBJC_TYPE (RTCStatisticsReport) (Private)

 - (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _timestamp_us = report.timestamp().us();

 NSMutableDictionary *statisticsById =
@@ -16,7 +16,8 @@
 (Private)

 - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 if (videoCodec) {
 const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
 self.name = [NSString stringWithUTF8String:codecName];
@@ -31,7 +32,6 @@
 self.mode = (RTCVideoCodecMode)videoCodec->mode;
 }
 }
-
 return self;
 }

@@ -33,9 +33,10 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
 (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
 RTC_DCHECK(factory);
 RTC_DCHECK(nativeVideoSource);
-if (self = [super initWithFactory:factory
-nativeMediaSource:nativeVideoSource
-type:RTCMediaSourceTypeVideo]) {
+self = [super initWithFactory:factory
+nativeMediaSource:nativeVideoSource
+type:RTCMediaSourceTypeVideo];
+if (self) {
 _nativeVideoSource = nativeVideoSource;
 }
 return self;
@@ -45,7 +45,8 @@
 NSParameterAssert(factory);
 NSParameterAssert(nativeMediaTrack);
 NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
-if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) {
+self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type];
+if (self) {
 _adapters = [NSMutableArray array];
 _workerThread = factory.workerThread;
 }
@@ -20,10 +20,10 @@
 @implementation RTC_OBJC_TYPE (RTCI420Buffer)

 - (instancetype)initWithWidth:(int)width height:(int)height {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _i420Buffer = webrtc::I420Buffer::Create(width, height);
 }
-
 return self;
 }

@@ -32,7 +32,8 @@
 dataY:(const uint8_t *)dataY
 dataU:(const uint8_t *)dataU
 dataV:(const uint8_t *)dataV {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _i420Buffer = webrtc::I420Buffer::Copy(
 width, height, dataY, width, dataU, (width + 1) / 2, dataV, (width + 1) / 2);
 }
@@ -44,18 +45,18 @@
 strideY:(int)strideY
 strideU:(int)strideU
 strideV:(int)strideV {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
 }
-
 return self;
 }

 - (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _i420Buffer = i420Buffer;
 }
-
 return self;
 }

@@ -15,7 +15,8 @@
 @synthesize delegate = _delegate;

 - (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _delegate = delegate;
 }
 return self;
@@ -21,7 +21,8 @@

 - (instancetype)initWithName:(NSString *)name
 parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _name = name;
 _parameters = (parameters ? parameters : @{});
 }
@@ -16,7 +16,8 @@
 @synthesize high = _high;

 - (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _low = low;
 _high = high;
 }
@@ -66,12 +66,12 @@
 - (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)buffer
 rotation:(RTCVideoRotation)rotation
 timeStampNs:(int64_t)timeStampNs {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _buffer = buffer;
 _rotation = rotation;
 _timeStampNs = timeStampNs;
 }
-
 return self;
 }

@@ -77,7 +77,8 @@ ABSL_CONST_INIT thread_local bool mutex_locked = false;

 /** This initializer provides a way for unit tests to inject a fake/mock audio session. */
 - (instancetype)initWithAudioSession:(id)audioSession {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _session = audioSession;

 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
@@ -53,7 +53,8 @@ static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
 @synthesize outputNumberOfChannels = _outputNumberOfChannels;

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 // Use a category which supports simultaneous recording and playback.
 // By default, using this category implies that our app’s audio is
 // nonmixable, hence activating the session will interrupt any other
@@ -20,7 +20,8 @@

 - (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
 RTC_DCHECK(observer);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _observer = observer;
 }
 return self;
@@ -65,7 +65,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 // This initializer is used for testing.
 - (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
 captureSession:(AVCaptureSession *)captureSession {
-if (self = [super initWithDelegate:delegate]) {
+self = [super initWithDelegate:delegate];
+if (self) {
 // Create the capture session and all relevant inputs and outputs. We need
 // to do this in init because the application may want the capture session
 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
@@ -54,7 +54,8 @@ rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType)

 - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer {
 RTC_DCHECK(observer);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _observer = observer;
 if (@available(iOS 12, *)) {
 _pathMonitor = nw_path_monitor_create();
@@ -115,7 +115,8 @@ static const NSInteger kMaxInflightBuffers = 1;
 @synthesize rotationOverride = _rotationOverride;

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
 }

@@ -19,7 +19,8 @@

 - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
 NSParameterAssert(timerHandler);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _timerHandler = timerHandler;
 _displayLink =
 [CADisplayLink displayLinkWithTarget:self
@@ -69,7 +69,8 @@
 }

 - (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
-if (self = [super initWithFrame:frame]) {
+self = [super initWithFrame:frame];
+if (self) {
 _shader = shader;
 if (![self configure]) {
 return nil;
@@ -80,7 +81,8 @@

 - (instancetype)initWithCoder:(NSCoder *)aDecoder
 shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
-if (self = [super initWithCoder:aDecoder]) {
+self = [super initWithCoder:aDecoder];
+if (self) {
 _shader = shader;
 if (![self configure]) {
 return nil;
@@ -46,7 +46,8 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
 }

 - (instancetype)initWithContext:(GlContextType *)context {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
 glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

@@ -29,7 +29,8 @@
 }

 - (instancetype)initWithContext:(EAGLContext *)context {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 CVReturn ret = CVOpenGLESTextureCacheCreate(
 kCFAllocatorDefault, NULL,
 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
@@ -90,7 +90,8 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
 @synthesize hexString = _hexString;

 - (instancetype)initWithHexString:(NSString *)hexString {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 self.hexString = hexString;

 absl::optional<webrtc::H264ProfileLevelId> profile_level_id =
@@ -104,7 +105,8 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() {
 }

 - (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 self.profile = profile;
 self.level = level;

@@ -344,7 +344,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
 // conditions, 0.95 seems to give us better overall bitrate over long periods
 // of time.
 - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _codecInfo = codecInfo;
 _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
 _packetizationMode = RTCH264PacketizationModeNonInterleaved;
@@ -62,7 +62,8 @@
 cropHeight:(int)cropHeight
 cropX:(int)cropX
 cropY:(int)cropY {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _width = adaptedWidth;
 _height = adaptedHeight;
 _pixelBuffer = pixelBuffer;
@@ -75,7 +75,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
 (rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>)audioDeviceModule
 audioDeviceThread:(rtc::Thread*)thread {
 RTC_DCHECK_RUN_ON(thread);
-if (self = [super init]) {
+self = [super init];
+if (self) {
 impl_ = rtc::make_ref_counted<AudioDeviceDelegateImpl>(audioDeviceModule, thread);
 preferredInputSampleRate_ = kPreferredInputSampleRate;
 preferredInputIOBufferDuration_ = kPeferredInputIOBufferDuration;
@@ -53,7 +53,8 @@
 @synthesize outputVolume = _outputVolume;

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _outputVolume = -1;
 }
 return self;
@@ -98,7 +99,8 @@
 @implementation RTCTestRemoveOnDeallocDelegate

 - (instancetype)init {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
 [session addDelegate:self];
 }
@@ -42,7 +42,8 @@

 - (instancetype)initWithSupportedCodecs:
 (nonnull NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 _supportedCodecs = supportedCodecs;
 }
 return self;
@@ -53,7 +53,8 @@ static cricket::VideoFormat expectedFormat =
 - (instancetype)initWithMediaSubtype:(FourCharCode)subtype
 minFps:(float)minFps
 maxFps:(float)maxFps {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight,
 nil, &_format);
 // We can use OCMock for the range.
@@ -35,7 +35,8 @@
 static NSInteger nextYOrigin_;

 - (id)initWithTitle:(NSString *)title width:(int)width height:(int)height {
-if (self = [super init]) {
+self = [super init];
+if (self) {
 title_ = title;
 width_ = width;
 height_ = height;