Broadcast extension for AppRTCMobile on iOS

This provides an environment for testing the use of WebRTC from an iOS
app extension. It implements a ReplayKit broadcast extension for live
streaming of games and screen sharing.

The extension is supported only on iOS 11+, so it is guarded by a
build flag.

Bug: webrtc:9335
Change-Id: Id218d6c73ef7599f5953c5a1e0e62e5d0dc4f10b
Reviewed-on: https://webrtc-review.googlesource.com/80000
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23504}
Author: Anders Carlsson, 2018-06-04 10:24:37 +02:00 (committed by Commit Bot)
Parent: 27fe43a1aa
Commit: 358f2e0760
18 changed files with 572 additions and 18 deletions
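
To build AppRTCMobile with the extension, the new GN argument has to be enabled
when the iOS build is generated. A minimal args.gn sketch, assuming an arm64
device build (the output directory and the target_os/target_cpu values are
illustrative; only rtc_apprtcmobile_broadcast_extension comes from this change):

    # Example args.gn -- only the last line is introduced by this change.
    target_os = "ios"
    target_cpu = "arm64"
    rtc_apprtcmobile_broadcast_extension = true

With those arguments, something like "gn gen out/ios_arm64" followed by
"ninja -C out/ios_arm64 AppRTCMobile" should produce an AppRTCMobile.app that
bundles the AppRTCMobileBroadcastUpload and AppRTCMobileBroadcastSetupUI
app extensions.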


@ -245,6 +245,8 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"objc/AppRTCMobile/ARDBitrateTracker.m",
"objc/AppRTCMobile/ARDCaptureController.h",
"objc/AppRTCMobile/ARDCaptureController.m",
"objc/AppRTCMobile/ARDExternalSampleCapturer.h",
"objc/AppRTCMobile/ARDExternalSampleCapturer.m",
"objc/AppRTCMobile/ARDJoinResponse+Internal.h",
"objc/AppRTCMobile/ARDJoinResponse.h",
"objc/AppRTCMobile/ARDJoinResponse.m",
@ -297,6 +299,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"../sdk:peerconnectionfactory_base_objc",
"../sdk:videocapture_objc",
"../sdk:videocodec_objc",
"../sdk:videoframebuffer_objc",
"../sdk:videosource_objc",
]
}
@ -365,11 +368,91 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"../sdk:framework_objc",
]
if (rtc_apprtcmobile_broadcast_extension) {
deps += [
":AppRTCMobileBroadcastSetupUI_extension_bundle",
":AppRTCMobileBroadcastUpload_extension_bundle",
]
}
if (target_cpu == "x86") {
deps += [ "//testing/iossim:iossim" ]
}
}
if (rtc_apprtcmobile_broadcast_extension) {
bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") {
testonly = true
public_deps = [
":AppRTCMobileBroadcastUpload",
]
sources = [
"$root_out_dir/AppRTCMobileBroadcastUpload.appex",
]
outputs = [
"{{bundle_plugins_dir}}/{{source_file_part}}",
]
}
bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") {
testonly = true
public_deps = [
":AppRTCMobileBroadcastSetupUI",
]
sources = [
"$root_out_dir/AppRTCMobileBroadcastSetupUI.appex",
]
outputs = [
"{{bundle_plugins_dir}}/{{source_file_part}}",
]
}
rtc_static_library("AppRTCMobileBroadcastUpload_lib") {
check_includes = false
testonly = true
sources = [
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h",
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m",
]
deps = [
":AppRTCMobile_ios_frameworks",
":apprtc_signaling",
"../sdk:framework_objc",
]
libs = [ "ReplayKit.framework" ]
}
ios_appex_bundle("AppRTCMobileBroadcastUpload") {
testonly = true
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist"
deps = [
":AppRTCMobileBroadcastUpload_lib",
"../sdk:framework_objc",
]
}
ios_appex_bundle("AppRTCMobileBroadcastSetupUI") {
sources = [
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h",
"objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m",
]
info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist"
libs = [ "ReplayKit.framework" ]
deps = [
":AppRTCMobile_ios_bundle_data",
]
}
}
bundle_data("AppRTCMobile_ios_frameworks") {
deps = [
"../sdk:framework_objc+link",


@ -23,6 +23,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
@class ARDAppClient;
@class ARDSettingsModel;
@class ARDExternalSampleCapturer;
@class RTCMediaConstraints;
@class RTCCameraVideoCapturer;
@class RTCFileVideoCapturer;
@ -56,6 +57,9 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
- (void)appClient:(ARDAppClient *)client
didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer;
- (void)appClient:(ARDAppClient *)client
didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;
@end
// Handles connections to the AppRTC server for a given room. Methods on this
@ -67,6 +71,8 @@ didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer;
@property(nonatomic, assign) BOOL shouldGetStats;
@property(nonatomic, readonly) ARDAppClientState state;
@property(nonatomic, weak) id<ARDAppClientDelegate> delegate;
@property(nonatomic, assign, getter=isBroadcast) BOOL broadcast;
// Convenience constructor since all expected use cases will need a delegate
// in order to receive remote tracks.
- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate;


@ -28,6 +28,7 @@
#import "WebRTC/RTCVideoTrack.h"
#import "ARDAppEngineClient.h"
#import "ARDExternalSampleCapturer.h"
#import "ARDJoinResponse.h"
#import "ARDMessageResponse.h"
#import "ARDSettingsModel.h"
@ -128,6 +129,7 @@ static int const kKbpsMultiplier = 1000;
@synthesize defaultPeerConnectionConstraints =
_defaultPeerConnectionConstraints;
@synthesize isLoopback = _isLoopback;
@synthesize broadcast = _broadcast;
- (instancetype)init {
return [self initWithDelegate:nil];
@ -237,8 +239,7 @@ static int const kKbpsMultiplier = 1000;
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
NSError *error) {
if (error) {
RTCLogError("Error retrieving TURN servers: %@",
error.localizedDescription);
RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
}
ARDAppClient *strongSelf = weakSelf;
[strongSelf.iceServers addObjectsFromArray:turnServers];
@ -713,9 +714,14 @@ static int const kKbpsMultiplier = 1000;
RTCVideoSource *source = [_factory videoSource];
#if !TARGET_IPHONE_SIMULATOR
if (self.isBroadcast) {
ARDExternalSampleCapturer *capturer =
[[ARDExternalSampleCapturer alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer];
} else {
RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalCapturer:capturer];
}
#else
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
if (@available(iOS 10, *)) {


@ -0,0 +1,18 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <WebRTC/RTCVideoCapturer.h>
@protocol ARDExternalSampleDelegate <NSObject>
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface ARDExternalSampleCapturer : RTCVideoCapturer <ARDExternalSampleDelegate>
@end


@ -0,0 +1,43 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDExternalSampleCapturer.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
@implementation ARDExternalSampleCapturer
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
return [super initWithDelegate:delegate];
}
#pragma mark - ARDExternalSampleDelegate
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
@end


@ -0,0 +1,25 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <ReplayKit/ReplayKit.h>
#import "WebRTC/RTCLogging.h"
#import "ARDAppClient.h"
@protocol ARDExternalSampleDelegate;
API_AVAILABLE(ios(10.0))
@interface ARDBroadcastSampleHandler
: RPBroadcastSampleHandler<ARDAppClientDelegate>
@property(nonatomic, strong) id<ARDExternalSampleDelegate> capturer;
@end


@ -0,0 +1,130 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDBroadcastSampleHandler.h"
#import <os/log.h>
#import "ARDExternalSampleCapturer.h"
#import "ARDSettingsModel.h"
#import "WebRTC/RTCCallbackLogger.h"
#import "WebRTC/RTCLogging.h"
@implementation ARDBroadcastSampleHandler {
ARDAppClient *_client;
RTCCallbackLogger *_callbackLogger;
}
@synthesize capturer = _capturer;
- (instancetype)init {
if (self = [super init]) {
_callbackLogger = [[RTCCallbackLogger alloc] init];
os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
[_callbackLogger start:^(NSString *logMessage) {
os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
}];
}
return self;
}
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
// User has requested to start the broadcast. Setup info from the UI extension can be supplied,
// but it is optional.
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
_client = [[ARDAppClient alloc] initWithDelegate:self];
_client.broadcast = YES;
NSString *roomName = nil;
if (setupInfo[@"roomName"]) {
roomName = (NSString *)setupInfo[@"roomName"];
} else {
u_int32_t randomRoomSuffix = arc4random_uniform(1000);
roomName = [NSString stringWithFormat:@"broadcast_%d", randomRoomSuffix];
}
[_client connectToRoomWithId:roomName settings:settingsModel isLoopback:NO];
RTCLog(@"Broadcast started.");
}
- (void)broadcastPaused {
// User has requested to pause the broadcast. Samples will stop being delivered.
}
- (void)broadcastResumed {
// User has requested to resume the broadcast. Sample delivery will resume.
}
- (void)broadcastFinished {
// User has requested to finish the broadcast.
[_client disconnect];
}
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
withType:(RPSampleBufferType)sampleBufferType {
switch (sampleBufferType) {
case RPSampleBufferTypeVideo:
[self.capturer didCaptureSampleBuffer:sampleBuffer];
break;
case RPSampleBufferTypeAudioApp:
break;
case RPSampleBufferTypeAudioMic:
break;
default:
break;
}
}
#pragma mark - ARDAppClientDelegate
- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
switch (state) {
case kARDAppClientStateConnected:
RTCLog(@"Client connected.");
break;
case kARDAppClientStateConnecting:
RTCLog(@"Client connecting.");
break;
case kARDAppClientStateDisconnected:
RTCLog(@"Client disconnected.");
break;
}
}
- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state {
RTCLog(@"ICE state changed: %ld", (long)state);
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer {
self.capturer = externalSampleCapturer;
}
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
}
- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats {
}
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
RTCLog(@"Error: %@", error);
}
@end


@ -0,0 +1,18 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <ReplayKit/ReplayKit.h>
#import <UIKit/UIKit.h>
API_AVAILABLE(ios(11.0))
@interface ARDBroadcastSetupViewController
: UIViewController<UITextFieldDelegate>
@end


@ -0,0 +1,107 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDBroadcastSetupViewController.h"
@implementation ARDBroadcastSetupViewController {
UITextField *_roomNameField;
}
- (void)loadView {
UIView *view = [[UIView alloc] initWithFrame:CGRectZero];
view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7];
UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
imageView.translatesAutoresizingMaskIntoConstraints = NO;
[view addSubview:imageView];
_roomNameField = [[UITextField alloc] initWithFrame:CGRectZero];
_roomNameField.borderStyle = UITextBorderStyleRoundedRect;
_roomNameField.font = [UIFont systemFontOfSize:14.0];
_roomNameField.translatesAutoresizingMaskIntoConstraints = NO;
_roomNameField.placeholder = @"Room name";
_roomNameField.returnKeyType = UIReturnKeyDone;
_roomNameField.delegate = self;
[view addSubview:_roomNameField];
UIButton *doneButton = [UIButton buttonWithType:UIButtonTypeSystem];
doneButton.translatesAutoresizingMaskIntoConstraints = NO;
doneButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
[doneButton setTitle:@"Done" forState:UIControlStateNormal];
[doneButton addTarget:self
action:@selector(userDidFinishSetup)
forControlEvents:UIControlEventTouchUpInside];
[view addSubview:doneButton];
UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
cancelButton.translatesAutoresizingMaskIntoConstraints = NO;
cancelButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
[cancelButton setTitle:@"Cancel" forState:UIControlStateNormal];
[cancelButton addTarget:self
action:@selector(userDidCancelSetup)
forControlEvents:UIControlEventTouchUpInside];
[view addSubview:cancelButton];
UILayoutGuide *margin = view.layoutMarginsGuide;
[imageView.widthAnchor constraintEqualToConstant:60.0].active = YES;
[imageView.heightAnchor constraintEqualToConstant:60.0].active = YES;
[imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20].active = YES;
[imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES;
[_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
[_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active =
YES;
[_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
[doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
[doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
[cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
[cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
UITapGestureRecognizer *tgr =
[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
[view addGestureRecognizer:tgr];
self.view = view;
}
- (IBAction)didTap:(id)sender {
[self.view endEditing:YES];
}
- (void)userDidFinishSetup {
// URL of the resource where the broadcast can be viewed. This will be returned to the application.
NSURL *broadcastURL = [NSURL
URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]];
// Dictionary with setup information that will be provided to the broadcast extension when the
// broadcast is started.
NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text};
// Tell ReplayKit that the extension is finished setting up and can begin broadcasting
[self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo];
}
- (void)userDidCancelSetup {
// Tell ReplayKit that the extension was cancelled by the user
[self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
code:-1
userInfo:nil]];
}
#pragma mark - UITextFieldDelegate
- (BOOL)textFieldShouldReturn:(UITextField *)textField {
[self userDidFinishSetup];
return YES;
}
@end


@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>AppRTCMobile</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>com.google.AppRTCMobile.BroadcastSetupUI</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>XPC!</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>NSExtension</key>
<dict>
<key>NSExtensionAttributes</key>
<dict>
<key>NSExtensionActivationRule</key>
<dict>
<key>NSExtensionActivationSupportsReplayKitStreaming</key>
<true/>
</dict>
</dict>
<key>NSExtensionPointIdentifier</key>
<string>com.apple.broadcast-services-setupui</string>
<key>NSExtensionPrincipalClass</key>
<string>ARDBroadcastSetupViewController</string>
</dict>
</dict>
</plist>


@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>AppRTCMobile</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>com.google.AppRTCMobile.BroadcastUpload</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>XPC!</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>NSExtension</key>
<dict>
<key>NSExtensionPointIdentifier</key>
<string>com.apple.broadcast-services-upload</string>
<key>NSExtensionPrincipalClass</key>
<string>ARDBroadcastSampleHandler</string>
<key>RPBroadcastProcessMode</key>
<string>RPBroadcastProcessModeSampleBuffer</string>
</dict>
</dict>
</plist>


@ -48,6 +48,12 @@ if (is_ios || is_mac) {
]
}
config("used_from_extension") {
if (is_ios && rtc_apprtcmobile_broadcast_extension) {
cflags = [ "-fapplication-extension" ]
}
}
rtc_static_library("common_objc") {
sources = [
"objc/Framework/Classes/Common/NSString+StdString.h",
@ -71,7 +77,10 @@ if (is_ios || is_mac) {
"../rtc_base:checks",
"../rtc_base:rtc_base",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
public_configs = [ ":common_config_objc" ]
@ -176,7 +185,10 @@ if (is_ios || is_mac) {
"objc/Framework/Headers/WebRTC/RTCAudioSession.h",
"objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
public_configs = [ ":common_config_objc" ]
@ -218,7 +230,10 @@ if (is_ios || is_mac) {
"//third_party/libyuv",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin
# (bugs.webrtc.org/163).
@ -244,7 +259,10 @@ if (is_ios || is_mac) {
"//rtc_base:rtc_base_approved",
"//third_party/libyuv",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
}
rtc_static_library("video_objc") {
@ -294,7 +312,10 @@ if (is_ios || is_mac) {
"../rtc_base:rtc_base",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin
# (bugs.webrtc.org/163).
@ -629,7 +650,10 @@ if (is_ios || is_mac) {
"objc/Framework/Headers/WebRTC/RTCMediaSource.h",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
public_configs = [ ":common_config_objc" ]
deps = [
@ -734,7 +758,10 @@ if (is_ios || is_mac) {
"objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
public_configs = [ ":common_config_objc" ]
if (!build_with_chromium && is_clang) {
@ -1056,7 +1083,10 @@ if (is_ios || is_mac) {
"GLKit.framework",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
public_configs = [ ":common_config_objc" ]
@ -1218,7 +1248,14 @@ if (is_ios || is_mac) {
"objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm",
]
configs += [ "..:common_objc" ]
configs += [
"..:common_objc",
":used_from_extension",
]
if (is_ios && rtc_apprtcmobile_broadcast_extension) {
defines = [ "RTC_APPRTCMOBILE_BROADCAST_EXTENSION" ]
}
deps = [
":common_objc",


@ -12,6 +12,7 @@
#import <Foundation/Foundation.h>
NS_EXTENSION_UNAVAILABLE_IOS("Application status not available in app extensions.")
@interface RTCUIApplicationStatusObserver : NSObject
+ (instancetype)sharedInstance;


@ -78,7 +78,7 @@ void decompressionOutputCallback(void *decoderRef,
- (instancetype)init {
if (self = [super init]) {
#if defined(WEBRTC_IOS)
#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
[RTCUIApplicationStatusObserver prepareForUse];
_error = noErr;
#endif
@ -113,7 +113,7 @@ void decompressionOutputCallback(void *decoderRef,
return WEBRTC_VIDEO_CODEC_ERROR;
}
#if defined(WEBRTC_IOS)
#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
// Ignore all decode requests when app isn't active. In this state, the
// hardware decoder has been invalidated by the OS.


@ -307,7 +307,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
#if defined(WEBRTC_IOS)
#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
[RTCUIApplicationStatusObserver prepareForUse];
#endif
}
@ -345,7 +345,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
if (!_callback || !_compressionSession) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
#if defined(WEBRTC_IOS)
#if defined(WEBRTC_IOS) && !defined(RTC_APPRTCMOBILE_BROADCAST_EXTENSION)
if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
// Ignore all encode requests when app isn't active. In this state, the
// hardware encoder has been invalidated by the OS.


@ -19,6 +19,7 @@ NS_ASSUME_NONNULL_BEGIN
RTC_EXPORT
// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate
// (usually RTCVideoSource).
NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
@interface RTCCameraVideoCapturer : RTCVideoCapturer
// Capture session that is used for capturing. Valid from initialization to dealloc.


@ -28,6 +28,7 @@ RTC_EXPORT
* bounds using OpenGLES 2.0 or OpenGLES 3.0.
*/
RTC_EXPORT
NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;


@ -151,6 +151,12 @@ declare_args() {
# Disable this to build without support for built-in software codecs.
rtc_use_builtin_sw_codecs = true
if (is_ios) {
# Build broadcast extension in AppRTCMobile for iOS. This results in the
# binary only running on iOS 11+, which is why it is disabled by default.
rtc_apprtcmobile_broadcast_extension = false
}
}
if (!build_with_mozilla) {