Format /examples folder

Formatting done via:

git ls-files | grep -E '^examples\/.*\.(h|cc|mm)' | xargs clang-format -i
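
The same file list can be fed to clang-format in check-only mode to verify the result without rewriting anything. This is an editorial suggestion rather than part of the commit, and the --dry-run/-Werror flags assume clang-format 10 or newer:

git ls-files | grep -E '^examples\/.*\.(h|cc|mm)' | xargs clang-format --dry-run -Werror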

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: I761ed6e6401af9fa68b435c5edbc58d285c3a5cc
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373583
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43676}
Author: Boris Tsirkin, 2025-01-07 02:46:23 -08:00; committed by WebRTC LUCI CQ
Parent: cd2b8ea4e2
Commit: 2becf23acc
24 changed files with 273 additions and 172 deletions

View File

@@ -18,7 +18,8 @@
 @class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
-@interface ARDAppClient () <ARDSignalingChannelDelegate, RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>
+@interface ARDAppClient () <ARDSignalingChannelDelegate,
+    RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>
 // All properties should only be mutated from the main queue.
 @property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;
@@ -42,7 +43,8 @@
 @property(nonatomic, strong) NSURL *webSocketRestURL;
 @property(nonatomic, readonly) BOOL isLoopback;
-@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) *
+    defaultPeerConnectionConstraints;
 - (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
 signalingChannel:(id<ARDSignalingChannel>)channel

View File

@@ -33,12 +33,15 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 // main queue.
 @protocol ARDAppClientDelegate <NSObject>
-- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state;
+- (void)appClient:(ARDAppClient *)client
+    didChangeState:(ARDAppClientState)state;
-- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state;
+- (void)appClient:(ARDAppClient *)client
+    didChangeConnectionState:(RTCIceConnectionState)state;
-- (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
+- (void)appClient:(ARDAppClient *)client
+    didCreateLocalCapturer:
+        (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
 - (void)appClient:(ARDAppClient *)client
 didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack;
@@ -48,14 +51,17 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 - (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
-- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
+- (void)appClient:(ARDAppClient *)client
+    didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
 @optional
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
+    didCreateLocalFileCapturer:
+        (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;
+    didCreateLocalExternalSampleCapturer:
+        (ARDExternalSampleCapturer *)externalSampleCapturer;
 @end

View File

@@ -15,7 +15,8 @@
 // Controls the camera. Handles starting the capture, switching cameras etc.
 @interface ARDCaptureController : NSObject
-- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+- (instancetype)initWithCapturer:
+    (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
 settings:(ARDSettingsModel *)settings;
 - (void)startCapture;
 - (void)startCapture:(void (^)(NSError *))completion;

View File

@@ -18,12 +18,14 @@
 - (void)joinRoomWithRoomId:(NSString *)roomId
 isLoopback:(BOOL)isLoopback
-completionHandler:(void (^)(ARDJoinResponse *response, NSError *error))completionHandler;
+completionHandler:(void (^)(ARDJoinResponse *response,
+    NSError *error))completionHandler;
 - (void)sendMessage:(ARDSignalingMessage *)message
 forRoomId:(NSString *)roomId
 clientId:(NSString *)clientId
-completionHandler:(void (^)(ARDMessageResponse *response, NSError *error))completionHandler;
+completionHandler:(void (^)(ARDMessageResponse *response,
+    NSError *error))completionHandler;
 - (void)leaveRoomWithRoomId:(NSString *)roomId
 clientId:(NSString *)clientId

View File

@@ -34,7 +34,8 @@ NS_ASSUME_NONNULL_BEGIN
 /**
 * Returns current video resolution string.
 * If no resolution is in store, default value of 640x480 is returned.
-* When defaulting to value, the default is saved in store for consistency reasons.
+* When defaulting to value, the default is saved in store for consistency
+* reasons.
 */
 - (NSString *)currentVideoResolutionSettingFromStore;
 - (int)currentVideoResolutionWidthFromStore;
@@ -56,7 +57,8 @@ NS_ASSUME_NONNULL_BEGIN
 - (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs;
 /**
-* Returns current video codec setting from store if present or default (H264) otherwise.
+* Returns current video codec setting from store if present or default (H264)
+* otherwise.
 */
 - (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore;
@@ -83,7 +85,8 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate;
 /**
-* Returns current audio only setting from store if present or default (NO) otherwise.
+* Returns current audio only setting from store if present or default (NO)
+* otherwise.
 */
 - (BOOL)currentAudioOnlySettingFromStore;
@@ -95,7 +98,8 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)storeAudioOnlySetting:(BOOL)audioOnly;
 /**
-* Returns current create AecDump setting from store if present or default (NO) otherwise.
+* Returns current create AecDump setting from store if present or default (NO)
+* otherwise.
 */
 - (BOOL)currentCreateAecDumpSettingFromStore;
@@ -107,8 +111,8 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)storeCreateAecDumpSetting:(BOOL)createAecDump;
 /**
-* Returns current setting whether to use manual audio config from store if present or default (YES)
-* otherwise.
+* Returns current setting whether to use manual audio config from store if
+* present or default (YES) otherwise.
 */
 - (BOOL)currentUseManualAudioConfigSettingFromStore;

View File

@@ -26,9 +26,11 @@ typedef NS_ENUM(NSInteger, ARDSignalingChannelState) {
 @protocol ARDSignalingChannel;
 @protocol ARDSignalingChannelDelegate <NSObject>
-- (void)channel:(id<ARDSignalingChannel>)channel didChangeState:(ARDSignalingChannelState)state;
+- (void)channel:(id<ARDSignalingChannel>)channel
+    didChangeState:(ARDSignalingChannelState)state;
-- (void)channel:(id<ARDSignalingChannel>)channel didReceiveMessage:(ARDSignalingMessage *)message;
+- (void)channel:(id<ARDSignalingChannel>)channel
+    didReceiveMessage:(ARDSignalingMessage *)message;
 @end

View File

@@ -40,17 +40,21 @@ typedef enum {
 @interface ARDICECandidateRemovalMessage : ARDSignalingMessage
-@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
+@property(nonatomic, readonly)
+    NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
-- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+- (instancetype)initWithRemovedCandidates:
+    (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
 @end
 @interface ARDSessionDescriptionMessage : ARDSignalingMessage
-@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) *
+    sessionDescription;
-- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description;
+- (instancetype)initWithDescription:
+    (RTC_OBJC_TYPE(RTCSessionDescription) *)description;
 @end

View File

@@ -17,7 +17,7 @@
 @protocol ARDTURNClient <NSObject>
 // Returns TURN server urls if successful.
-- (void)requestServersWithCompletionHandler:(void (^)(NSArray *turnServers,
-    NSError *error))completionHandler;
+- (void)requestServersWithCompletionHandler:
+    (void (^)(NSArray *turnServers, NSError *error))completionHandler;
 @end

View File

@@ -13,10 +13,12 @@
 @interface RTC_OBJC_TYPE (RTCIceCandidate)
 (JSON)
-+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary;
++ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary
+    : (NSDictionary *)dictionary;
 + (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
 (NSDictionary *)dictionary;
-+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
++ (NSData *)JSONDataForIceCandidates:
+    (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
 withType:(NSString *)typeValue;
 - (NSData *)JSONData;

View File

@@ -13,6 +13,7 @@
 @interface RTC_OBJC_TYPE (RTCIceServer)
 (JSON)
-+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary;
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary
+    : (NSDictionary *)dictionary;
 @end

View File

@@ -22,13 +22,15 @@
 // Issues an asynchronous request that calls back on main queue.
 + (void)sendAsyncRequest:(NSURLRequest *)request
-completionHandler:
-    (void (^)(NSURLResponse *response, NSData *data, NSError *error))completionHandler;
+completionHandler:(void (^)(NSURLResponse *response,
+    NSData *data,
+    NSError *error))completionHandler;
 // Posts data to the specified URL.
 + (void)sendAsyncPostToURL:(NSURL *)url
 withData:(NSData *)data
-completionHandler:(void (^)(BOOL succeeded, NSData *data))completionHandler;
+completionHandler:
+    (void (^)(BOOL succeeded, NSData *data))completionHandler;
 @end

View File

@@ -25,7 +25,8 @@ NS_CLASS_AVAILABLE_IOS(10)
 *
 * @param capturer The capturer to be controlled.
 */
-- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;
+- (instancetype)initWithCapturer:
+    (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;
 /**
 * Starts the file capturer.

View File

@@ -14,7 +14,9 @@
 @protocol ARDMainViewDelegate <NSObject>
-- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback;
+- (void)mainView:(ARDMainView *)mainView
+    didInputRoom:(NSString *)room
+    isLoopback:(BOOL)isLoopback;
 - (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView;
 @end

View File

@@ -39,8 +39,10 @@
 @interface ARDVideoCallView : UIView
 @property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) *
+    localVideoView;
-@property(nonatomic, readonly) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic, readonly)
+    __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
 @property(nonatomic, readonly) ARDStatsView *statsView;
 @property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;

View File

@@ -17,7 +17,8 @@
 @protocol ARDExternalSampleDelegate;
 API_AVAILABLE(ios(10.0))
-@interface ARDBroadcastSampleHandler : RPBroadcastSampleHandler <ARDAppClientDelegate>
+@interface ARDBroadcastSampleHandler
+    : RPBroadcastSampleHandler <ARDAppClientDelegate>
 @property(nonatomic, strong) id<ARDExternalSampleDelegate> capturer;

View File

@@ -12,6 +12,7 @@
 #import <UIKit/UIKit.h>
 API_AVAILABLE(ios(11.0))
-@interface ARDBroadcastSetupViewController : UIViewController <UITextFieldDelegate>
+@interface ARDBroadcastSetupViewController
+    : UIViewController <UITextFieldDelegate>
 @end

View File

@@ -35,7 +35,7 @@
 isInitiator:(BOOL)isInitiator
 messages:(NSArray *)messages
 messageHandler:
 (void (^)(ARDSignalingMessage *))messageHandler {
 id mockRoomServerClient =
 [OCMockObject mockForProtocol:@protocol(ARDRoomServerClient)];
@@ -69,9 +69,9 @@
 messageHandler(message);
 completionHandler(messageResponse, nil);
 }] sendMessage:[OCMArg any]
 forRoomId:roomId
 clientId:clientId
 completionHandler:[OCMArg any]];
 // Do nothing on leave.
 [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
@@ -89,8 +89,8 @@
 - (id)mockSignalingChannelForRoomId:(NSString *)roomId
 clientId:(NSString *)clientId
-messageHandler:
-    (void (^)(ARDSignalingMessage *message))messageHandler {
+messageHandler:(void (^)(ARDSignalingMessage *message))
+    messageHandler {
 id mockSignalingChannel =
 [OCMockObject niceMockForProtocol:@protocol(ARDSignalingChannel)];
 [[mockSignalingChannel stub] registerForRoomId:roomId clientId:clientId];
@@ -103,8 +103,7 @@
 }
 - (id)mockTURNClient {
-id mockTURNClient =
-    [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
+id mockTURNClient = [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
 [[[mockTURNClient stub] andDo:^(NSInvocation *invocation) {
 // Don't return anything in TURN response.
 __unsafe_unretained void (^completionHandler)(NSArray *turnServers,
@@ -124,24 +123,24 @@
 return model;
 }
-- (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId
-    clientId:(NSString *)clientId
-    isInitiator:(BOOL)isInitiator
-    messages:(NSArray *)messages
-    messageHandler:
-    (void (^)(ARDSignalingMessage *message))messageHandler
-    connectedHandler:(void (^)(void))connectedHandler
-    localVideoTrackHandler:(void (^)(void))localVideoTrackHandler {
+- (ARDAppClient *)
+    createAppClientForRoomId:(NSString *)roomId
+    clientId:(NSString *)clientId
+    isInitiator:(BOOL)isInitiator
+    messages:(NSArray *)messages
+    messageHandler:
+    (void (^)(ARDSignalingMessage *message))messageHandler
+    connectedHandler:(void (^)(void))connectedHandler
+    localVideoTrackHandler:(void (^)(void))localVideoTrackHandler {
 id turnClient = [self mockTURNClient];
 id signalingChannel = [self mockSignalingChannelForRoomId:roomId
 clientId:clientId
 messageHandler:messageHandler];
-id roomServerClient =
-    [self mockRoomServerClientForRoomId:roomId
-    clientId:clientId
-    isInitiator:isInitiator
-    messages:messages
-    messageHandler:messageHandler];
+id roomServerClient = [self mockRoomServerClientForRoomId:roomId
+    clientId:clientId
+    isInitiator:isInitiator
+    messages:messages
+    messageHandler:messageHandler];
 id delegate =
 [OCMockObject niceMockForProtocol:@protocol(ARDAppClientDelegate)];
 [[[delegate stub] andDo:^(NSInvocation *invocation) {
@@ -150,8 +149,7 @@
 didChangeConnectionState:RTCIceConnectionStateConnected];
 [[[delegate stub] andDo:^(NSInvocation *invocation) {
 localVideoTrackHandler();
-}] appClient:[OCMArg any]
-    didReceiveLocalVideoTrack:[OCMArg any]];
+}] appClient:[OCMArg any] didReceiveLocalVideoTrack:[OCMArg any]];
 return [[ARDAppClient alloc] initWithRoomServerClient:roomServerClient
 signalingChannel:signalingChannel
@@ -183,55 +181,68 @@
 [self expectationWithDescription:@"Answerer PC connected"];
 caller = [self createAppClientForRoomId:roomId
 clientId:callerId
 isInitiator:YES
 messages:[NSArray array]
 messageHandler:^(ARDSignalingMessage *message) {
 ARDAppClient *strongAnswerer = weakAnswerer;
-[strongAnswerer channel:strongAnswerer.channel didReceiveMessage:message];
-} connectedHandler:^{
-    [callerConnectionExpectation fulfill];
-} localVideoTrackHandler:^{
-}];
+[strongAnswerer channel:strongAnswerer.channel
+    didReceiveMessage:message];
+}
+connectedHandler:^{
+    [callerConnectionExpectation fulfill];
+}
+localVideoTrackHandler:^{
+}];
 // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
 // crash in Debug.
-caller.defaultPeerConnectionConstraints =
-    [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
-    optionalConstraints:nil];
+caller.defaultPeerConnectionConstraints = [[RTC_OBJC_TYPE(RTCMediaConstraints)
+    alloc] initWithMandatoryConstraints:nil optionalConstraints:nil];
 weakCaller = caller;
 answerer = [self createAppClientForRoomId:roomId
 clientId:answererId
 isInitiator:NO
 messages:[NSArray array]
 messageHandler:^(ARDSignalingMessage *message) {
 ARDAppClient *strongCaller = weakCaller;
 [strongCaller channel:strongCaller.channel didReceiveMessage:message];
-} connectedHandler:^{
-    [answererConnectionExpectation fulfill];
-} localVideoTrackHandler:^{
-}];
+}
+connectedHandler:^{
+    [answererConnectionExpectation fulfill];
+}
+localVideoTrackHandler:^{
+}];
 // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
 // crash in Debug.
 answerer.defaultPeerConnectionConstraints =
-[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
-    optionalConstraints:nil];
+[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+    initWithMandatoryConstraints:nil
+    optionalConstraints:nil];
 weakAnswerer = answerer;
 // Kick off connection.
-[caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
-[answerer connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
-[self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
-    if (error) {
-    XCTFail(@"Expectation failed with error %@.", error);
-    }
-}];
+[caller connectToRoomWithId:roomId
+    settings:[self mockSettingsModel]
+    isLoopback:NO];
+[answerer connectToRoomWithId:roomId
+    settings:[self mockSettingsModel]
+    isLoopback:NO];
+[self waitForExpectationsWithTimeout:20
+    handler:^(NSError *error) {
+    if (error) {
+    XCTFail(@"Expectation failed with error %@.",
+    error);
+    }
+    }];
 }
 // Test to see that we get a local video connection
 // Note this will currently pass even when no camera is connected as a local
-// video track is created regardless (Perhaps there should be a test for that...)
-#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera support
+// video track is created regardless (Perhaps there should be a test for
+// that...)
+#if !TARGET_IPHONE_SIMULATOR  // Expect to fail on simulator due to no camera
+                              // support
 - (void)testSessionShouldGetLocalVideoTrackCallback {
 ARDAppClient *caller = nil;
 NSString *roomId = @"testRoom";
@@ -241,25 +252,31 @@
 [self expectationWithDescription:@"Caller got local video."];
 caller = [self createAppClientForRoomId:roomId
 clientId:callerId
 isInitiator:YES
 messages:[NSArray array]
-messageHandler:^(ARDSignalingMessage *message) {}
-connectedHandler:^{}
-localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
-caller.defaultPeerConnectionConstraints =
-    [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
-    optionalConstraints:nil];
+messageHandler:^(ARDSignalingMessage *message) {
+}
+connectedHandler:^{
+}
+localVideoTrackHandler:^{
+    [localVideoTrackExpectation fulfill];
+}];
+caller.defaultPeerConnectionConstraints = [[RTC_OBJC_TYPE(RTCMediaConstraints)
+    alloc] initWithMandatoryConstraints:nil optionalConstraints:nil];
 // Kick off connection.
 [caller connectToRoomWithId:roomId
 settings:[self mockSettingsModel]
 isLoopback:NO];
-[self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
-    if (error) {
-    XCTFail("Expectation timed out with error: %@.", error);
-    }
-}];
+[self waitForExpectationsWithTimeout:20
+    handler:^(NSError *error) {
+    if (error) {
+    XCTFail(
+    "Expectation timed out with error: %@.",
+    error);
+    }
+    }];
 }
 #endif

View File

@@ -31,7 +31,8 @@ NS_CLASS_AVAILABLE_IOS(10)
 - (void)setUp {
 [super setUp];
-self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
+self.fileCapturerMock =
+    OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
 self.fileCaptureController =
 [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock];
 }
@@ -44,7 +45,8 @@ NS_CLASS_AVAILABLE_IOS(10)
 }
 - (void)testCaptureIsStarted {
-[[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any] onError:[OCMArg any]];
+[[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any]
+    onError:[OCMArg any]];
 [self.fileCaptureController startCapture];

View File

@@ -17,7 +17,6 @@
 #import "ARDSettingsModel+Private.h"
 #import "ARDSettingsStore.h"
 @interface ARDSettingsModelTests : XCTestCase {
 ARDSettingsModel *_model;
 }
@@ -50,13 +49,13 @@
 - (void)testStoringInvalidConstraintReturnsNo {
 id storeMock = [self setupMockStore];
-[([[storeMock stub] andReturn:@"960x480"])videoResolution];
+[([[storeMock stub] andReturn:@"960x480"]) videoResolution];
 XCTAssertFalse([_model storeVideoResolutionSetting:@"960x480"]);
 }
 - (void)testWidthConstraintFromStore {
 id storeMock = [self setupMockStore];
-[([[storeMock stub] andReturn:@"1270x480"])videoResolution];
+[([[storeMock stub] andReturn:@"1270x480"]) videoResolution];
 int width = [_model currentVideoResolutionWidthFromStore];
 XCTAssertEqual(width, 1270);
@@ -64,7 +63,7 @@
 - (void)testHeightConstraintFromStore {
 id storeMock = [self setupMockStore];
-[([[storeMock stub] andReturn:@"960x540"])videoResolution];
+[([[storeMock stub] andReturn:@"960x540"]) videoResolution];
 int height = [_model currentVideoResolutionHeightFromStore];
 XCTAssertEqual(height, 540);
@@ -72,7 +71,7 @@
 - (void)testConstraintComponentIsNilWhenInvalidConstraintString {
 id storeMock = [self setupMockStore];
-[([[storeMock stub] andReturn:@"invalid"])videoResolution];
+[([[storeMock stub] andReturn:@"invalid"]) videoResolution];
 int width = [_model currentVideoResolutionWidthFromStore];
 XCTAssertEqual(width, 0);

View File

@@ -59,7 +59,8 @@ extern NSString *const SRHTTPResponseErrorKey;
 // It will be nil until after the handshake completes.
 @property(nonatomic, readonly, copy) NSString *protocol;
-// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol.
+// Protocols should be an array of strings that turn into
+// Sec-WebSocket-Protocol.
 - (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
 - (id)initWithURLRequest:(NSURLRequest *)request;
@@ -72,11 +73,13 @@ extern NSString *const SRHTTPResponseErrorKey;
 - (void)setDelegateOperationQueue:(NSOperationQueue *)queue;
 - (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
-// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes.
+// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using
+// defaultModes.
 - (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
 - (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
-// SRWebSockets are intended for one-time-use only. Open should be called once and only once.
+// SRWebSockets are intended for one-time-use only. Open should be called once
+// and only once.
 - (void)open;
 - (void)close;

View File

@@ -23,7 +23,8 @@
 @property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
 @property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
-@property(nonatomic) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic)
+    __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
 @property(nonatomic) UIButton *callButton;
 @property(nonatomic) UIButton *hangUpButton;
@@ -46,18 +47,22 @@
 - (void)loadView {
 _view = [[UIView alloc] initWithFrame:CGRectZero];
-_remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+_remoteVideoView =
+    [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
 _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
 [_view addSubview:_remoteVideoView];
-_localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+_localVideoView =
+    [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
 _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
 [_view addSubview:_localVideoView];
 _callButton = [UIButton buttonWithType:UIButtonTypeSystem];
 _callButton.translatesAutoresizingMaskIntoConstraints = NO;
 [_callButton setTitle:@"Call" forState:UIControlStateNormal];
-[_callButton addTarget:self action:@selector(call:) forControlEvents:UIControlEventTouchUpInside];
+[_callButton addTarget:self
+    action:@selector(call:)
+    forControlEvents:UIControlEventTouchUpInside];
 [_view addSubview:_callButton];
 _hangUpButton = [UIButton buttonWithType:UIButtonTypeSystem];
@@ -69,27 +74,40 @@
 [_view addSubview:_hangUpButton];
 UILayoutGuide *margin = _view.layoutMarginsGuide;
-[_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
-[_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES;
-[_remoteVideoView.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
-[_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor].active = YES;
-[_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
-    YES;
-[_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:8.0].active = YES;
+[_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor]
+    .active = YES;
+[_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active =
+    YES;
+[_remoteVideoView.trailingAnchor
+    constraintEqualToAnchor:margin.trailingAnchor]
+    .active = YES;
+[_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor]
+    .active = YES;
+[_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor
+    constant:8.0]
+    .active = YES;
+[_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor
+    constant:8.0]
+    .active = YES;
 [_localVideoView.widthAnchor constraintEqualToConstant:60].active = YES;
 [_localVideoView.heightAnchor constraintEqualToConstant:60].active = YES;
-[_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
-    YES;
-[_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = YES;
+[_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor
+    constant:8.0]
+    .active = YES;
+[_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
+    constant:8.0]
+    .active = YES;
 [_callButton.widthAnchor constraintEqualToConstant:100].active = YES;
 [_callButton.heightAnchor constraintEqualToConstant:40].active = YES;
-[_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor constant:8.0].active =
-    YES;
-[_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active =
-    YES;
+[_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor
+    constant:8.0]
+    .active = YES;
+[_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
+    constant:8.0]
+    .active = YES;
 [_hangUpButton.widthAnchor constraintEqualToConstant:100].active = YES;
 [_hangUpButton.heightAnchor constraintEqualToConstant:40].active = YES;
@@ -120,20 +138,27 @@
 int targetHeight = 480;
 int currentDiff = INT_MAX;
 NSArray<AVCaptureDeviceFormat *> *formats =
-[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice];
+[RTC_OBJC_TYPE(RTCCameraVideoCapturer)
+    supportedFormatsForDevice:selectedDevice];
 for (AVCaptureDeviceFormat *format in formats) {
-CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+CMVideoDimensions dimension =
+    CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+FourCharCode pixelFormat =
+    CMFormatDescriptionGetMediaSubType(format.formatDescription);
+int diff = abs(targetWidth - dimension.width) +
+    abs(targetHeight - dimension.height);
 if (diff < currentDiff) {
 selectedFormat = format;
 currentDiff = diff;
-} else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+} else if (diff == currentDiff &&
+    pixelFormat == [_capturer preferredOutputPixelFormat]) {
 selectedFormat = format;
 }
 }
-[self.capturer startCaptureWithDevice:selectedDevice format:selectedFormat fps:30];
+[self.capturer startCaptureWithDevice:selectedDevice
+    format:selectedFormat
+    fps:30];
 }
 - (void)didReceiveMemoryWarning {

View File

@@ -40,14 +40,17 @@ class ObjCCallClient {
 public:
 explicit PCObserver(ObjCCallClient* client);
-void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override;
+void OnSignalingChange(
+    webrtc::PeerConnectionInterface::SignalingState new_state) override;
-void OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+void OnDataChannel(
+    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
 void OnRenegotiationNeeded() override;
 void OnIceConnectionChange(
 webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
 void OnIceGatheringChange(
 webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
-void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+void OnIceCandidate(
+    const webrtc::IceCandidateInterface* candidate) override;
 private:
 ObjCCallClient* const client_;
@@ -63,10 +66,12 @@ class ObjCCallClient {
 const std::unique_ptr<PCObserver> pc_observer_;
-rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_ RTC_GUARDED_BY(thread_checker_);
+rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_
+    RTC_GUARDED_BY(thread_checker_);
 std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
 std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
-std::unique_ptr<rtc::Thread> signaling_thread_ RTC_GUARDED_BY(thread_checker_);
+std::unique_ptr<rtc::Thread> signaling_thread_
+    RTC_GUARDED_BY(thread_checker_);
 std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
 RTC_GUARDED_BY(thread_checker_);
@@ -74,7 +79,8 @@ class ObjCCallClient {
 RTC_GUARDED_BY(thread_checker_);
 webrtc::Mutex pc_mutex_;
-rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_ RTC_GUARDED_BY(pc_mutex_);
+rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_
+    RTC_GUARDED_BY(pc_mutex_);
 };
 } // namespace webrtc_examples

View File

@@ -37,7 +37,8 @@ namespace {
 class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
 public:
-explicit CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
+explicit CreateOfferObserver(
+    rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
 void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
 void OnFailure(webrtc::RTCError error) override;
@@ -46,12 +47,14 @@ class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
 const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
 };
-class SetRemoteSessionDescriptionObserver : public webrtc::SetRemoteDescriptionObserverInterface {
+class SetRemoteSessionDescriptionObserver
+    : public webrtc::SetRemoteDescriptionObserverInterface {
 public:
 void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
 };
-class SetLocalSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface {
+class SetLocalSessionDescriptionObserver
+    : public webrtc::SetLocalDescriptionObserverInterface {
 public:
 void OnSetLocalDescriptionComplete(webrtc::RTCError error) override;
 };
@@ -77,8 +80,8 @@ void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
 remote_sink_ = webrtc::ObjCToNativeVideoRenderer(remote_renderer);
-video_source_ =
-    webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread_.get(), worker_thread_.get());
+video_source_ = webrtc::ObjCToNativeVideoCapturer(
+    capturer, signaling_thread_.get(), worker_thread_.get());
 CreatePeerConnection();
 Connect();
@@ -119,15 +122,19 @@ void ObjCCallClient::CreatePeerConnectionFactory() {
 dependencies.worker_thread = worker_thread_.get();
 dependencies.signaling_thread = signaling_thread_.get();
 dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
-dependencies.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
-dependencies.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+dependencies.audio_encoder_factory =
+    webrtc::CreateBuiltinAudioEncoderFactory();
+dependencies.audio_decoder_factory =
+    webrtc::CreateBuiltinAudioDecoderFactory();
 dependencies.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(
 [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]);
 dependencies.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(
 [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]);
-dependencies.audio_processing_builder = std::make_unique<webrtc::BuiltinAudioProcessingBuilder>();
+dependencies.audio_processing_builder =
+    std::make_unique<webrtc::BuiltinAudioProcessingBuilder>();
 webrtc::EnableMedia(dependencies);
-dependencies.event_log_factory = std::make_unique<webrtc::RtcEventLogFactory>();
+dependencies.event_log_factory =
+    std::make_unique<webrtc::RtcEventLogFactory>();
 pcf_ = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
 RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get();
 }
@@ -141,7 +148,8 @@ void ObjCCallClient::CreatePeerConnection() {
 options.disable_encryption = true;
 pcf_->SetOptions(options);
 webrtc::PeerConnectionDependencies pc_dependencies(pc_observer_.get());
-pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)).MoveValue();
+pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies))
+    .MoveValue();
 RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get();
 rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
@@ -151,8 +159,10 @@ void ObjCCallClient::CreatePeerConnection() {
 for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& tranceiver :
 pc_->GetTransceivers()) {
-rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track = tranceiver->receiver()->track();
-if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track =
+    tranceiver->receiver()->track();
+if (track &&
+    track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
 static_cast<webrtc::VideoTrackInterface*>(track.get())
 ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
 RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get();
@@ -167,7 +177,8 @@ void ObjCCallClient::Connect() {
 webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
 }
-ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) : client_(client) {}
+ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client)
+    : client_(client) {}
 void ObjCCallClient::PCObserver::OnSignalingChange(
 webrtc::PeerConnectionInterface::SignalingState new_state) {
@@ -193,14 +204,16 @@ void ObjCCallClient::PCObserver::OnIceGatheringChange(
 RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
 }
-void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+void ObjCCallClient::PCObserver::OnIceCandidate(
+    const webrtc::IceCandidateInterface* candidate) {
 RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
 webrtc::MutexLock lock(&client_->pc_mutex_);
 RTC_DCHECK(client_->pc_ != nullptr);
 client_->pc_->AddIceCandidate(candidate);
 }
-CreateOfferObserver::CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
+CreateOfferObserver::CreateOfferObserver(
+    rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
 : pc_(pc) {}
 void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
@@ -209,25 +222,29 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
 RTC_LOG(LS_INFO) << "Created offer: " << sdp;
 // Ownership of desc was transferred to us, now we transfer it forward.
-pc_->SetLocalDescription(absl::WrapUnique(desc),
-    rtc::make_ref_counted<SetLocalSessionDescriptionObserver>());
+pc_->SetLocalDescription(
+    absl::WrapUnique(desc),
+    rtc::make_ref_counted<SetLocalSessionDescriptionObserver>());
 // Generate a fake answer.
 std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
 webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
-pc_->SetRemoteDescription(std::move(answer),
-    rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
+pc_->SetRemoteDescription(
+    std::move(answer),
+    rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
 }
 void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
 RTC_LOG(LS_INFO) << "Failed to create offer: " << error.message();
 }
-void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(webrtc::RTCError error) {
+void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(
+    webrtc::RTCError error) {
 RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
 }
-void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete(webrtc::RTCError error) {
+void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete(
+    webrtc::RTCError error) {
 RTC_LOG(LS_INFO) << "Set local description: " << error.message();
 }

View File

@@ -33,7 +33,6 @@ typedef int NativeSocket;
 #endif
 #endif
 class SocketBase {
 public:
 SocketBase() : socket_(INVALID_SOCKET) {}