path: root/third_party/libwebrtc/examples/objc
Diffstat (limited to 'third_party/libwebrtc/examples/objc')
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h | 52
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h | 87
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m | 899
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h | 14
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m | 175
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h | 26
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m | 116
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h | 18
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m | 52
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h | 23
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h | 32
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m | 82
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h | 26
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m | 46
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h | 32
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h | 21
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h | 123
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m | 211
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h | 52
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m | 115
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h | 48
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h | 58
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m | 160
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h | 26
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m | 36
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h | 23
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m | 86
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h | 40
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m | 252
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h | 23
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m | 100
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h | 18
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m | 25
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h | 20
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m | 36
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h | 35
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m | 126
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m | 56
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h | 42
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m | 45
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h | 30
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m | 196
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h | 14
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m | 263
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h | 37
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m | 361
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h | 21
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m | 50
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h | 47
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m | 213
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h | 28
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m | 250
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist | 109
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h | 18
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m | 37
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h | 18
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m | 31
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h | 24
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m | 130
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m | 107
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist | 39
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist | 33
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m | 20
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf | Bin 0 -> 126072 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4 | Bin 0 -> 546651 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png | Bin 0 -> 3640 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png | Bin 0 -> 4856 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png | Bin 0 -> 11152 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png | Bin 0 -> 316 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png | Bin 0 -> 479 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png | Bin 0 -> 257 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png | Bin 0 -> 360 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png | Bin 0 -> 322 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png | Bin 0 -> 557 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png | Bin 0 -> 285 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png | Bin 0 -> 570 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png | Bin 0 -> 242 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png | Bin 0 -> 311 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3 | Bin 0 -> 893658 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h | 14
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m | 55
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m | 407
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist | 33
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m | 22
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm | 266
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm | 62
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm | 96
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm | 21
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE | 15
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h | 135
-rw-r--r--  third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m | 1774
-rw-r--r--  third_party/libwebrtc/examples/objc/Icon-120.png | Bin 0 -> 8133 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/Icon-180.png | Bin 0 -> 12502 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/Icon.png | Bin 0 -> 62469 bytes
-rw-r--r--  third_party/libwebrtc/examples/objc/README | 3
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/Info.plist | 45
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h | 17
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m | 63
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h | 15
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm | 154
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/main.m | 18
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h | 82
-rw-r--r--  third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm | 238
108 files changed, 9153 insertions, 0 deletions
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h
new file mode 100644
index 0000000000..31e0e4dd7c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient+Internal.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppClient.h"
+
+#import "sdk/objc/api/peerconnection/RTCPeerConnection.h"
+
+#import "ARDRoomServerClient.h"
+#import "ARDSignalingChannel.h"
+#import "ARDTURNClient.h"
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface ARDAppClient () <ARDSignalingChannelDelegate, RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>
+
+// All properties should only be mutated from the main queue.
+@property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;
+@property(nonatomic, strong) id<ARDSignalingChannel> channel;
+@property(nonatomic, strong) id<ARDSignalingChannel> loopbackChannel;
+@property(nonatomic, strong) id<ARDTURNClient> turnClient;
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+@property(nonatomic, strong) NSMutableArray *messageQueue;
+
+@property(nonatomic, assign) BOOL isTurnComplete;
+@property(nonatomic, assign) BOOL hasReceivedSdp;
+@property(nonatomic, readonly) BOOL hasJoinedRoomServerRoom;
+
+@property(nonatomic, strong) NSString *roomId;
+@property(nonatomic, strong) NSString *clientId;
+@property(nonatomic, assign) BOOL isInitiator;
+@property(nonatomic, strong) NSMutableArray *iceServers;
+@property(nonatomic, strong) NSURL *webSocketURL;
+@property(nonatomic, strong) NSURL *webSocketRestURL;
+@property(nonatomic, readonly) BOOL isLoopback;
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints;
+
+- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
+ signalingChannel:(id<ARDSignalingChannel>)channel
+ turnClient:(id<ARDTURNClient>)turnClient
+ delegate:(id<ARDAppClientDelegate>)delegate;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h
new file mode 100644
index 0000000000..91d2cef1ce
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCPeerConnection.h"
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+
+typedef NS_ENUM(NSInteger, ARDAppClientState) {
+ // Disconnected from servers.
+ kARDAppClientStateDisconnected,
+ // Connecting to servers.
+ kARDAppClientStateConnecting,
+ // Connected to servers.
+ kARDAppClientStateConnected,
+};
+
+@class ARDAppClient;
+@class ARDSettingsModel;
+@class ARDExternalSampleCapturer;
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCCameraVideoCapturer);
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
+
+// The delegate is informed of pertinent events and will be called on the
+// main queue.
+@protocol ARDAppClientDelegate <NSObject>
+
+- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state;
+
+- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state;
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack;
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack;
+
+- (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
+
+@optional
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;
+
+@end
+
+// Handles connections to the AppRTC server for a given room. Methods on this
+// class should only be called from the main queue.
+@interface ARDAppClient : NSObject
+
+// If `shouldGetStats` is true, stats will be reported in 1s intervals through
+// the delegate.
+@property(nonatomic, assign) BOOL shouldGetStats;
+@property(nonatomic, readonly) ARDAppClientState state;
+@property(nonatomic, weak) id<ARDAppClientDelegate> delegate;
+@property(nonatomic, assign, getter=isBroadcast) BOOL broadcast;
+
+// Convenience constructor since all expected use cases will need a delegate
+// in order to receive remote tracks.
+- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate;
+
+// Establishes a connection with the AppRTC servers for the given room id.
+// `settings` is an object containing settings such as video codec for the call.
+// If `isLoopback` is true, the call will connect to itself.
+- (void)connectToRoomWithId:(NSString *)roomId
+ settings:(ARDSettingsModel *)settings
+ isLoopback:(BOOL)isLoopback;
+
+// Disconnects from the AppRTC servers and any connected clients.
+- (void)disconnect;
+
+@end
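The header above is the entire public surface of ARDAppClient. For orientation, a minimal caller-side sketch (illustrative only, not part of this patch; MYCallViewController and the room id are placeholders, and ARDSettingsModel is assumed to be default-constructible as its header in this change suggests). In the app itself, ios/ARDVideoCallViewController.m from the diffstat plays this role.

#import <UIKit/UIKit.h>

#import "ARDAppClient.h"
#import "ARDSettingsModel.h"

@interface MYCallViewController : UIViewController <ARDAppClientDelegate>
@end

@implementation MYCallViewController {
  ARDAppClient *_client;
}

- (void)viewDidLoad {
  [super viewDidLoad];
  _client = [[ARDAppClient alloc] initWithDelegate:self];
  _client.shouldGetStats = YES;  // Stats arrive via -appClient:didGetStats: once per second.
  ARDSettingsModel *settings = [[ARDSettingsModel alloc] init];  // Assumed default init.
  [_client connectToRoomWithId:@"myroom" settings:settings isLoopback:NO];
}

- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
  if (state == kARDAppClientStateDisconnected) {
    // Tear down the call UI here.
  }
}

// The remaining required ARDAppClientDelegate methods (local/remote track and
// error callbacks) would be implemented alongside this one.

- (void)dealloc {
  [_client disconnect];
}

@end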
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m
new file mode 100644
index 0000000000..4420972598
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppClient.m
@@ -0,0 +1,899 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppClient+Internal.h"
+
+#import "sdk/objc/api/peerconnection/RTCAudioTrack.h"
+#import "sdk/objc/api/peerconnection/RTCConfiguration.h"
+#import "sdk/objc/api/peerconnection/RTCFileLogger.h"
+#import "sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h"
+#import "sdk/objc/api/peerconnection/RTCIceServer.h"
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/api/peerconnection/RTCMediaStream.h"
+#import "sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h"
+#import "sdk/objc/api/peerconnection/RTCRtpSender.h"
+#import "sdk/objc/api/peerconnection/RTCRtpTransceiver.h"
+#import "sdk/objc/api/peerconnection/RTCTracing.h"
+#import "sdk/objc/api/peerconnection/RTCVideoSource.h"
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+
+#import "ARDAppEngineClient.h"
+#import "ARDExternalSampleCapturer.h"
+#import "ARDJoinResponse.h"
+#import "ARDMessageResponse.h"
+#import "ARDSettingsModel.h"
+#import "ARDSignalingMessage.h"
+#import "ARDTURNClient+Internal.h"
+#import "ARDUtilities.h"
+#import "ARDWebSocketChannel.h"
+#import "RTCIceCandidate+JSON.h"
+#import "RTCSessionDescription+JSON.h"
+
+static NSString * const kARDIceServerRequestUrl = @"https://appr.tc/params";
+
+static NSString * const kARDAppClientErrorDomain = @"ARDAppClient";
+static NSInteger const kARDAppClientErrorUnknown = -1;
+static NSInteger const kARDAppClientErrorRoomFull = -2;
+static NSInteger const kARDAppClientErrorCreateSDP = -3;
+static NSInteger const kARDAppClientErrorSetSDP = -4;
+static NSInteger const kARDAppClientErrorInvalidClient = -5;
+static NSInteger const kARDAppClientErrorInvalidRoom = -6;
+static NSString * const kARDMediaStreamId = @"ARDAMS";
+static NSString * const kARDAudioTrackId = @"ARDAMSa0";
+static NSString * const kARDVideoTrackId = @"ARDAMSv0";
+static NSString * const kARDVideoTrackKind = @"video";
+
+// TODO(tkchin): Add these as UI options.
+#if defined(WEBRTC_IOS)
+static BOOL const kARDAppClientEnableTracing = NO;
+static BOOL const kARDAppClientEnableRtcEventLog = YES;
+static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB.
+static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB.
+#endif
+static int const kKbpsMultiplier = 1000;
+
+// We need a proxy around NSTimer because the timer retains its target (a retain
+// cycle); `invalidate` must be called on the proxy before it can be deallocated.
+@interface ARDTimerProxy : NSObject
+
+- (instancetype)initWithInterval:(NSTimeInterval)interval
+ repeats:(BOOL)repeats
+ timerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
+
+@implementation ARDTimerProxy {
+ NSTimer *_timer;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithInterval:(NSTimeInterval)interval
+ repeats:(BOOL)repeats
+ timerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _timer = [NSTimer scheduledTimerWithTimeInterval:interval
+ target:self
+ selector:@selector(timerDidFire:)
+ userInfo:nil
+ repeats:repeats];
+ }
+ return self;
+}
+
+- (void)invalidate {
+ [_timer invalidate];
+}
+
+- (void)timerDidFire:(NSTimer *)timer {
+ _timerHandler();
+}
+
+@end
+
+@implementation ARDAppClient {
+ RTC_OBJC_TYPE(RTCFileLogger) * _fileLogger;
+ ARDTimerProxy *_statsTimer;
+ ARDSettingsModel *_settings;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
+}
+
+@synthesize shouldGetStats = _shouldGetStats;
+@synthesize state = _state;
+@synthesize delegate = _delegate;
+@synthesize roomServerClient = _roomServerClient;
+@synthesize channel = _channel;
+@synthesize loopbackChannel = _loopbackChannel;
+@synthesize turnClient = _turnClient;
+@synthesize peerConnection = _peerConnection;
+@synthesize factory = _factory;
+@synthesize messageQueue = _messageQueue;
+@synthesize isTurnComplete = _isTurnComplete;
+@synthesize hasReceivedSdp = _hasReceivedSdp;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+@synthesize isInitiator = _isInitiator;
+@synthesize iceServers = _iceServers;
+@synthesize webSocketURL = _websocketURL;
+@synthesize webSocketRestURL = _websocketRestURL;
+@synthesize defaultPeerConnectionConstraints =
+ _defaultPeerConnectionConstraints;
+@synthesize isLoopback = _isLoopback;
+@synthesize broadcast = _broadcast;
+
+- (instancetype)init {
+ return [self initWithDelegate:nil];
+}
+
+- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate {
+ if (self = [super init]) {
+ _roomServerClient = [[ARDAppEngineClient alloc] init];
+ _delegate = delegate;
+ NSURL *turnRequestURL = [NSURL URLWithString:kARDIceServerRequestUrl];
+ _turnClient = [[ARDTURNClient alloc] initWithURL:turnRequestURL];
+ [self configure];
+ }
+ return self;
+}
+
+// TODO(tkchin): Provide signaling channel factory interface so we can recreate
+// channel if we need to on network failure. Also, make this the default public
+// constructor.
+- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
+ signalingChannel:(id<ARDSignalingChannel>)channel
+ turnClient:(id<ARDTURNClient>)turnClient
+ delegate:(id<ARDAppClientDelegate>)delegate {
+ NSParameterAssert(rsClient);
+ NSParameterAssert(channel);
+ NSParameterAssert(turnClient);
+ if (self = [super init]) {
+ _roomServerClient = rsClient;
+ _channel = channel;
+ _turnClient = turnClient;
+ _delegate = delegate;
+ [self configure];
+ }
+ return self;
+}
+
+- (void)configure {
+ _messageQueue = [NSMutableArray array];
+ _iceServers = [NSMutableArray array];
+ _fileLogger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init];
+ [_fileLogger start];
+}
+
+- (void)dealloc {
+ self.shouldGetStats = NO;
+ [self disconnect];
+}
+
+- (void)setShouldGetStats:(BOOL)shouldGetStats {
+ if (_shouldGetStats == shouldGetStats) {
+ return;
+ }
+ if (shouldGetStats) {
+ __weak ARDAppClient *weakSelf = self;
+ _statsTimer = [[ARDTimerProxy alloc] initWithInterval:1
+ repeats:YES
+ timerHandler:^{
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.peerConnection statisticsWithCompletionHandler:^(
+ RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.delegate appClient:strongSelf didGetStats:stats];
+ });
+ }];
+ }];
+ } else {
+ [_statsTimer invalidate];
+ _statsTimer = nil;
+ }
+ _shouldGetStats = shouldGetStats;
+}
+
+- (void)setState:(ARDAppClientState)state {
+ if (_state == state) {
+ return;
+ }
+ _state = state;
+ [_delegate appClient:self didChangeState:_state];
+}
+
+- (void)connectToRoomWithId:(NSString *)roomId
+ settings:(ARDSettingsModel *)settings
+ isLoopback:(BOOL)isLoopback {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(_state == kARDAppClientStateDisconnected);
+ _settings = settings;
+ _isLoopback = isLoopback;
+ self.state = kARDAppClientStateConnecting;
+
+ RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory =
+ [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
+ RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
+ [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
+ encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
+ _factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
+ decoderFactory:decoderFactory];
+
+#if defined(WEBRTC_IOS)
+ if (kARDAppClientEnableTracing) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
+ RTCStartInternalCapture(filePath);
+ }
+#endif
+
+ // Request TURN.
+ __weak ARDAppClient *weakSelf = self;
+ [_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
+ NSError *error) {
+ if (error) {
+ RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
+ }
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf.iceServers addObjectsFromArray:turnServers];
+ strongSelf.isTurnComplete = YES;
+ [strongSelf startSignalingIfReady];
+ }];
+
+ // Join room on room server.
+ [_roomServerClient joinRoomWithRoomId:roomId
+ isLoopback:isLoopback
+ completionHandler:^(ARDJoinResponse *response, NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ return;
+ }
+ NSError *joinError =
+ [[strongSelf class] errorForJoinResultType:response.result];
+ if (joinError) {
+ RTCLogError(@"Failed to join room:%@ on room server.", roomId);
+ [strongSelf disconnect];
+ [strongSelf.delegate appClient:strongSelf didError:joinError];
+ return;
+ }
+ RTCLog(@"Joined room:%@ on room server.", roomId);
+ strongSelf.roomId = response.roomId;
+ strongSelf.clientId = response.clientId;
+ strongSelf.isInitiator = response.isInitiator;
+ for (ARDSignalingMessage *message in response.messages) {
+ if (message.type == kARDSignalingMessageTypeOffer ||
+ message.type == kARDSignalingMessageTypeAnswer) {
+ strongSelf.hasReceivedSdp = YES;
+ [strongSelf.messageQueue insertObject:message atIndex:0];
+ } else {
+ [strongSelf.messageQueue addObject:message];
+ }
+ }
+ strongSelf.webSocketURL = response.webSocketURL;
+ strongSelf.webSocketRestURL = response.webSocketRestURL;
+ [strongSelf registerWithColliderIfReady];
+ [strongSelf startSignalingIfReady];
+ }];
+}
+
+- (void)disconnect {
+ if (_state == kARDAppClientStateDisconnected) {
+ return;
+ }
+ if (self.hasJoinedRoomServerRoom) {
+ [_roomServerClient leaveRoomWithRoomId:_roomId
+ clientId:_clientId
+ completionHandler:nil];
+ }
+ if (_channel) {
+ if (_channel.state == kARDSignalingChannelStateRegistered) {
+ // Tell the other client we're hanging up.
+ ARDByeMessage *byeMessage = [[ARDByeMessage alloc] init];
+ [_channel sendMessage:byeMessage];
+ }
+ // Disconnect from collider.
+ _channel = nil;
+ }
+ _clientId = nil;
+ _roomId = nil;
+ _isInitiator = NO;
+ _hasReceivedSdp = NO;
+ _messageQueue = [NSMutableArray array];
+ _localVideoTrack = nil;
+#if defined(WEBRTC_IOS)
+ [_factory stopAecDump];
+ [_peerConnection stopRtcEventLog];
+#endif
+ [_peerConnection close];
+ _peerConnection = nil;
+ self.state = kARDAppClientStateDisconnected;
+#if defined(WEBRTC_IOS)
+ if (kARDAppClientEnableTracing) {
+ RTCStopInternalCapture();
+ }
+#endif
+}
+
+#pragma mark - ARDSignalingChannelDelegate
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didReceiveMessage:(ARDSignalingMessage *)message {
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer:
+ case kARDSignalingMessageTypeAnswer:
+ // Offers and answers must be processed before any other message, so we
+ // place them at the front of the queue.
+ _hasReceivedSdp = YES;
+ [_messageQueue insertObject:message atIndex:0];
+ break;
+ case kARDSignalingMessageTypeCandidate:
+ case kARDSignalingMessageTypeCandidateRemoval:
+ [_messageQueue addObject:message];
+ break;
+ case kARDSignalingMessageTypeBye:
+ // Disconnects can be processed immediately.
+ [self processSignalingMessage:message];
+ return;
+ }
+ [self drainMessageQueueIfReady];
+}
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didChangeState:(ARDSignalingChannelState)state {
+ switch (state) {
+ case kARDSignalingChannelStateOpen:
+ break;
+ case kARDSignalingChannelStateRegistered:
+ break;
+ case kARDSignalingChannelStateClosed:
+ case kARDSignalingChannelStateError:
+ // TODO(tkchin): reconnection scenarios. Right now we just disconnect
+ // completely if the websocket connection fails.
+ [self disconnect];
+ break;
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCPeerConnectionDelegate)
+// Callbacks for this delegate occur on non-main thread and need to be
+// dispatched back to main queue as needed.
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeSignalingState:(RTCSignalingState)stateChanged {
+ RTCLog(@"Signaling state changed: %ld", (long)stateChanged);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.",
+ (unsigned long)stream.videoTracks.count,
+ (unsigned long)stream.audioTracks.count);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
+ RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
+ RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ RTCLog(@"Stream was removed.");
+}
+
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
+ RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceConnectionState:(RTCIceConnectionState)newState {
+ RTCLog(@"ICE state changed: %ld", (long)newState);
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate appClient:self didChangeConnectionState:newState];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeConnectionState:(RTCPeerConnectionState)newState {
+ RTCLog(@"ICE+DTLS state changed: %ld", (long)newState);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceGatheringState:(RTCIceGatheringState)newState {
+ RTCLog(@"ICE gathering state changed: %ld", (long)newState);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDICECandidateMessage *message =
+ [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
+ [self sendSignalingMessage:message];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
+ RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, errorCode: %d, "
+ @"errorText: %@",
+ event.address,
+ event.port,
+ event.url,
+ event.errorCode,
+ event.errorText);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDICECandidateRemovalMessage *message =
+ [[ARDICECandidateRemovalMessage alloc]
+ initWithRemovedCandidates:candidates];
+ [self sendSignalingMessage:message];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+ didChangeRemoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
+ lastReceivedMs:(int)lastDataReceivedMs
+ didHaveReason:(NSString *)reason {
+ RTCLog(@"ICE candidate pair changed because: %@", reason);
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel {
+}
+
+#pragma mark - RTCSessionDescriptionDelegate
+// Callbacks for this delegate occur on non-main thread and need to be
+// dispatched back to main queue as needed.
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didCreateSessionDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ error:(NSError *)error {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (error) {
+ RTCLogError(@"Failed to create session description. Error: %@", error);
+ [self disconnect];
+ NSDictionary *userInfo = @{
+ NSLocalizedDescriptionKey: @"Failed to create session description.",
+ };
+ NSError *sdpError =
+ [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorCreateSDP
+ userInfo:userInfo];
+ [self.delegate appClient:self didError:sdpError];
+ return;
+ }
+ __weak ARDAppClient *weakSelf = self;
+ [self.peerConnection setLocalDescription:sdp
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didSetSessionDescriptionWithError:error];
+ }];
+ ARDSessionDescriptionMessage *message =
+ [[ARDSessionDescriptionMessage alloc] initWithDescription:sdp];
+ [self sendSignalingMessage:message];
+ [self setMaxBitrateForPeerConnectionVideoSender];
+ });
+}
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didSetSessionDescriptionWithError:(NSError *)error {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (error) {
+ RTCLogError(@"Failed to set session description. Error: %@", error);
+ [self disconnect];
+ NSDictionary *userInfo = @{
+ NSLocalizedDescriptionKey: @"Failed to set session description.",
+ };
+ NSError *sdpError =
+ [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorSetSDP
+ userInfo:userInfo];
+ [self.delegate appClient:self didError:sdpError];
+ return;
+ }
+ // If we're answering and we've just set the remote offer we need to create
+ // an answer and set the local description.
+ if (!self.isInitiator && !self.peerConnection.localDescription) {
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
+ __weak ARDAppClient *weakSelf = self;
+ [self.peerConnection
+ answerForConstraints:constraints
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didCreateSessionDescription:sdp
+ error:error];
+ }];
+ }
+ });
+}
+
+#pragma mark - Private
+
+#if defined(WEBRTC_IOS)
+
+- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
+ NSParameterAssert(fileName.length);
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(
+ NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = paths.firstObject;
+ NSString *filePath =
+ [documentsDirPath stringByAppendingPathComponent:fileName];
+ return filePath;
+}
+
+#endif
+
+- (BOOL)hasJoinedRoomServerRoom {
+ return _clientId.length;
+}
+
+// Begins the peer connection process if we have both joined a room
+// on the room server and tried to obtain a TURN server. Otherwise does nothing.
+// A peer connection object will be created with a stream that contains local
+// audio and video capture. If this client is the caller, an offer is created as
+// well, otherwise the client will wait for an offer to arrive.
+- (void)startSignalingIfReady {
+ if (!_isTurnComplete || !self.hasJoinedRoomServerRoom) {
+ return;
+ }
+ self.state = kARDAppClientStateConnected;
+
+ // Create peer connection.
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
+ generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
+ config.iceServers = _iceServers;
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.certificate = pcert;
+
+ _peerConnection = [_factory peerConnectionWithConfiguration:config
+ constraints:constraints
+ delegate:self];
+ // Create AV senders.
+ [self createMediaSenders];
+ if (_isInitiator) {
+ // Send offer.
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection
+ offerForConstraints:[self defaultOfferConstraints]
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didCreateSessionDescription:sdp
+ error:error];
+ }];
+ } else {
+ // Check if we've received an offer.
+ [self drainMessageQueueIfReady];
+ }
+#if defined(WEBRTC_IOS)
+ // Start event log.
+ if (kARDAppClientEnableRtcEventLog) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"];
+ if (![_peerConnection startRtcEventLogWithFilePath:filePath
+ maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) {
+ RTCLogError(@"Failed to start event logging.");
+ }
+ }
+
+ // Start aecdump diagnostic recording.
+ if ([_settings currentCreateAecDumpSettingFromStore]) {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
+ if (![_factory startAecDumpWithFilePath:filePath
+ maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
+ RTCLogError(@"Failed to start aec dump.");
+ }
+ }
+#endif
+}
+
+// Processes the messages that we've received from the room server and the
+// signaling channel. The offer or answer message must be processed before other
+// signaling messages; however, they can arrive out of order. Hence, this method
+// only processes pending messages if there is a peer connection object and
+// if we have received either an offer or answer.
+- (void)drainMessageQueueIfReady {
+ if (!_peerConnection || !_hasReceivedSdp) {
+ return;
+ }
+ for (ARDSignalingMessage *message in _messageQueue) {
+ [self processSignalingMessage:message];
+ }
+ [_messageQueue removeAllObjects];
+}
+
+// Processes the given signaling message based on its type.
+- (void)processSignalingMessage:(ARDSignalingMessage *)message {
+ NSParameterAssert(_peerConnection ||
+ message.type == kARDSignalingMessageTypeBye);
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer:
+ case kARDSignalingMessageTypeAnswer: {
+ ARDSessionDescriptionMessage *sdpMessage =
+ (ARDSessionDescriptionMessage *)message;
+ RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection setRemoteDescription:description
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ [strongSelf peerConnection:strongSelf.peerConnection
+ didSetSessionDescriptionWithError:error];
+ }];
+ break;
+ }
+ case kARDSignalingMessageTypeCandidate: {
+ ARDICECandidateMessage *candidateMessage =
+ (ARDICECandidateMessage *)message;
+ __weak ARDAppClient *weakSelf = self;
+ [_peerConnection addIceCandidate:candidateMessage.candidate
+ completionHandler:^(NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ }
+ }];
+ break;
+ }
+ case kARDSignalingMessageTypeCandidateRemoval: {
+ ARDICECandidateRemovalMessage *candidateMessage =
+ (ARDICECandidateRemovalMessage *)message;
+ [_peerConnection removeIceCandidates:candidateMessage.candidates];
+ break;
+ }
+ case kARDSignalingMessageTypeBye:
+ // Other client disconnected.
+ // TODO(tkchin): support waiting in room for next client. For now just
+ // disconnect.
+ [self disconnect];
+ break;
+ }
+}
+
+// Sends a signaling message to the other client. The caller will send messages
+// through the room server, whereas the callee will send messages over the
+// signaling channel.
+- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
+ if (_isInitiator) {
+ __weak ARDAppClient *weakSelf = self;
+ [_roomServerClient sendMessage:message
+ forRoomId:_roomId
+ clientId:_clientId
+ completionHandler:^(ARDMessageResponse *response,
+ NSError *error) {
+ ARDAppClient *strongSelf = weakSelf;
+ if (error) {
+ [strongSelf.delegate appClient:strongSelf didError:error];
+ return;
+ }
+ NSError *messageError =
+ [[strongSelf class] errorForMessageResultType:response.result];
+ if (messageError) {
+ [strongSelf.delegate appClient:strongSelf didError:messageError];
+ return;
+ }
+ }];
+ } else {
+ [_channel sendMessage:message];
+ }
+}
+
+- (void)setMaxBitrateForPeerConnectionVideoSender {
+ for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
+ if (sender.track != nil) {
+ if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
+ [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
+ }
+ }
+ }
+}
+
+- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
+ if (maxBitrate.intValue <= 0) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
+ for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
+ encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
+ }
+ [sender setParameters:parametersToModify];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
+ for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
+ if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
+ return transceiver;
+ }
+ }
+ return nil;
+}
+
+- (void)createMediaSenders {
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
+ RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
+ RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
+ trackId:kARDAudioTrackId];
+ [_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
+ _localVideoTrack = [self createLocalVideoTrack];
+ if (_localVideoTrack) {
+ [_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
+ [_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
+ // We can set up rendering for the remote track right away since the transceiver already has an
+ // RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
+ // produce frames once RTP is received.
+ RTC_OBJC_TYPE(RTCVideoTrack) *track =
+ (RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
+ [_delegate appClient:self didReceiveRemoteVideoTrack:track];
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)createLocalVideoTrack {
+ if ([_settings currentAudioOnlySettingFromStore]) {
+ return nil;
+ }
+
+ RTC_OBJC_TYPE(RTCVideoSource) *source = [_factory videoSource];
+
+#if !TARGET_IPHONE_SIMULATOR
+ if (self.isBroadcast) {
+ ARDExternalSampleCapturer *capturer =
+ [[ARDExternalSampleCapturer alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer];
+ } else {
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalCapturer:capturer];
+ }
+#else
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ RTC_OBJC_TYPE(RTCFileVideoCapturer) *fileCapturer =
+ [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source];
+ [_delegate appClient:self didCreateLocalFileCapturer:fileCapturer];
+ }
+#endif
+#endif
+
+ return [_factory videoTrackWithSource:source trackId:kARDVideoTrackId];
+}
+
+#pragma mark - Collider methods
+
+- (void)registerWithColliderIfReady {
+ if (!self.hasJoinedRoomServerRoom) {
+ return;
+ }
+ // Open WebSocket connection.
+ if (!_channel) {
+ _channel =
+ [[ARDWebSocketChannel alloc] initWithURL:_websocketURL
+ restURL:_websocketRestURL
+ delegate:self];
+ if (_isLoopback) {
+ _loopbackChannel =
+ [[ARDLoopbackWebSocketChannel alloc] initWithURL:_websocketURL
+ restURL:_websocketRestURL];
+ }
+ }
+ [_channel registerForRoomId:_roomId clientId:_clientId];
+ if (_isLoopback) {
+ [_loopbackChannel registerForRoomId:_roomId clientId:@"LOOPBACK_CLIENT_ID"];
+ }
+}
+
+#pragma mark - Defaults
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
+ NSDictionary *mandatoryConstraints = @{};
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
+ optionalConstraints:nil];
+ return constraints;
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultAnswerConstraints {
+ return [self defaultOfferConstraints];
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
+ NSDictionary *mandatoryConstraints = @{
+ @"OfferToReceiveAudio" : @"true",
+ @"OfferToReceiveVideo" : @"true"
+ };
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
+ optionalConstraints:nil];
+ return constraints;
+}
+
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultPeerConnectionConstraints {
+ if (_defaultPeerConnectionConstraints) {
+ return _defaultPeerConnectionConstraints;
+ }
+ NSString *value = _isLoopback ? @"false" : @"true";
+ NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+ optionalConstraints:optionalConstraints];
+ return constraints;
+}
+
+#pragma mark - Errors
+
++ (NSError *)errorForJoinResultType:(ARDJoinResultType)resultType {
+ NSError *error = nil;
+ switch (resultType) {
+ case kARDJoinResultTypeSuccess:
+ break;
+ case kARDJoinResultTypeUnknown: {
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Unknown error.",
+ }];
+ break;
+ }
+ case kARDJoinResultTypeFull: {
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorRoomFull
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Room is full.",
+ }];
+ break;
+ }
+ }
+ return error;
+}
+
++ (NSError *)errorForMessageResultType:(ARDMessageResultType)resultType {
+ NSError *error = nil;
+ switch (resultType) {
+ case kARDMessageResultTypeSuccess:
+ break;
+ case kARDMessageResultTypeUnknown:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorUnknown
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Unknown error.",
+ }];
+ break;
+ case kARDMessageResultTypeInvalidClient:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorInvalidClient
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Invalid client.",
+ }];
+ break;
+ case kARDMessageResultTypeInvalidRoom:
+ error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
+ code:kARDAppClientErrorInvalidRoom
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Invalid room.",
+ }];
+ break;
+ }
+ return error;
+}
+
+@end
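One implementation detail worth noting from the file above: -setMaxBitrate:forVideoSender: treats the settings value as kilobits per second and multiplies by kKbpsMultiplier (1000) before writing maxBitrateBps. A standalone sketch of the same conversion, using only the RTCRtpSender API already imported by this file (the 512 kbps cap is just an example figure):

#import "sdk/objc/api/peerconnection/RTCRtpSender.h"

// Cap a video sender at 512 kbps, i.e. 512 * 1000 bps, mirroring
// -setMaxBitrate:forVideoSender: in ARDAppClient.m.
static void CapVideoBitrate(RTC_OBJC_TYPE(RTCRtpSender) *sender) {
  RTC_OBJC_TYPE(RTCRtpParameters) *parameters = sender.parameters;
  for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) *encoding in parameters.encodings) {
    encoding.maxBitrateBps = @(512 * 1000);
  }
  [sender setParameters:parameters];  // Applies to the ongoing send; no renegotiation needed.
}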
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h
new file mode 100644
index 0000000000..7514f3645c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDRoomServerClient.h"
+
+@interface ARDAppEngineClient : NSObject <ARDRoomServerClient>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m
new file mode 100644
index 0000000000..5139de60d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDAppEngineClient.m
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppEngineClient.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDJoinResponse.h"
+#import "ARDMessageResponse.h"
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+
+// TODO(tkchin): move these to a configuration object.
+static NSString * const kARDRoomServerHostUrl =
+ @"https://appr.tc";
+static NSString * const kARDRoomServerJoinFormat =
+ @"https://appr.tc/join/%@";
+static NSString * const kARDRoomServerJoinFormatLoopback =
+ @"https://appr.tc/join/%@?debug=loopback";
+static NSString * const kARDRoomServerMessageFormat =
+ @"https://appr.tc/message/%@/%@";
+static NSString * const kARDRoomServerLeaveFormat =
+ @"https://appr.tc/leave/%@/%@";
+
+static NSString * const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient";
+static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
+
+@implementation ARDAppEngineClient
+
+#pragma mark - ARDRoomServerClient
+
+- (void)joinRoomWithRoomId:(NSString *)roomId
+ isLoopback:(BOOL)isLoopback
+ completionHandler:(void (^)(ARDJoinResponse *response,
+ NSError *error))completionHandler {
+ NSParameterAssert(roomId.length);
+
+ NSString *urlString = nil;
+ if (isLoopback) {
+ urlString =
+ [NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
+ } else {
+ urlString =
+ [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
+ }
+
+ NSURL *roomURL = [NSURL URLWithString:urlString];
+ RTCLog(@"Joining room:%@ on room server.", roomId);
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:roomURL];
+ request.HTTPMethod = @"POST";
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
+ if (error) {
+ if (completionHandler) {
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data];
+ if (!joinResponse) {
+ if (completionHandler) {
+ NSError *error = [[self class] badResponseError];
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(joinResponse, nil);
+ }
+ }];
+}
+
+- (void)sendMessage:(ARDSignalingMessage *)message
+ forRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(ARDMessageResponse *response,
+ NSError *error))completionHandler {
+ NSParameterAssert(message);
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+
+ NSData *data = [message JSONData];
+ NSString *urlString =
+ [NSString stringWithFormat:
+ kARDRoomServerMessageFormat, roomId, clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ RTCLog(@"C->RS POST: %@", message);
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+ request.HTTPBody = data;
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response,
+ NSData *data,
+ NSError *error) {
+ if (error) {
+ if (completionHandler) {
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ ARDMessageResponse *messageResponse =
+ [ARDMessageResponse responseFromJSONData:data];
+ if (!messageResponse) {
+ if (completionHandler) {
+ NSError *error = [[self class] badResponseError];
+ completionHandler(nil, error);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(messageResponse, nil);
+ }
+ }];
+}
+
+- (void)leaveRoomWithRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(NSError *error))completionHandler {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+
+ NSString *urlString =
+ [NSString stringWithFormat:kARDRoomServerLeaveFormat, roomId, clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+
+ RTCLog(@"C->RS: BYE");
+ __block NSError *error = nil;
+
+ // We want a synchronous request so that we know that we've left the room on
+// the room server before we do any further work.
+ dispatch_semaphore_t sem = dispatch_semaphore_create(0);
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response, NSData *data, NSError *e) {
+ if (e) {
+ error = e;
+ }
+ dispatch_semaphore_signal(sem);
+ }];
+
+ dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
+ if (error) {
+ RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription);
+ if (completionHandler) {
+ completionHandler(error);
+ }
+ return;
+ }
+ RTCLog(@"Left room:%@ on room server.", roomId);
+ if (completionHandler) {
+ completionHandler(nil);
+ }
+}
+
+#pragma mark - Private
+
++ (NSError *)badResponseError {
+ NSError *error =
+ [[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain
+ code:kARDAppEngineClientErrorBadResponse
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Error parsing response.",
+ }];
+ return error;
+}
+
+@end
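ARDAppEngineClient above is a thin REST wrapper over the appr.tc endpoints listed at the top of the file. A minimal sketch of calling it directly (illustrative only; @"myroom" is a placeholder, and the response fields shown are the ones ARDAppClient.m reads):

#import <Foundation/Foundation.h>

#import "ARDAppEngineClient.h"
#import "ARDJoinResponse.h"

// Join a room and log the assignment handed back by the room server.
static void JoinRoomExample(void) {
  ARDAppEngineClient *client = [[ARDAppEngineClient alloc] init];
  [client joinRoomWithRoomId:@"myroom"
                  isLoopback:NO
           completionHandler:^(ARDJoinResponse *response, NSError *error) {
             if (error) {
               NSLog(@"Join failed: %@", error.localizedDescription);
               return;
             }
             NSLog(@"Joined room %@ as client %@ (initiator: %d)",
                   response.roomId, response.clientId, response.isInitiator);
           }];
}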
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h
new file mode 100644
index 0000000000..4febccee96
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+
+@class ARDSettingsModel;
+
+// Controls the camera. Handles starting the capture, switching cameras etc.
+@interface ARDCaptureController : NSObject
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+ settings:(ARDSettingsModel *)settings;
+- (void)startCapture;
+- (void)startCapture:(void (^)(NSError *))completion;
+- (void)stopCapture;
+- (void)switchCamera;
+- (void)switchCamera:(void (^)(NSError *))completion;
+
+@end
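The intended hookup is that the app receives the camera capturer through ARDAppClientDelegate's -appClient:didCreateLocalCapturer: and hands it to an ARDCaptureController together with the settings model. A sketch of that delegate method (illustrative only; the surrounding view controller and its captureController property are assumptions, not part of this patch):

#import "ARDAppClient.h"
#import "ARDCaptureController.h"
#import "ARDSettingsModel.h"

// Fragment of a class adopting ARDAppClientDelegate; `self.captureController`
// is an assumed property of type ARDCaptureController *.
- (void)appClient:(ARDAppClient *)client
    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
  ARDSettingsModel *settings = [[ARDSettingsModel alloc] init];  // Assumed default init.
  self.captureController =
      [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settings];
  [self.captureController startCapture];
}

A front/back camera toggle then simply calls -switchCamera (or -switchCamera: to observe errors), and -stopCapture is called when the call ends.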
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
new file mode 100644
index 0000000000..26cce9fdaa
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDCaptureController.m
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDCaptureController.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDSettingsModel.h"
+
+const Float64 kFramerateLimit = 30.0;
+
+@implementation ARDCaptureController {
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
+ ARDSettingsModel *_settings;
+ BOOL _usingFrontCamera;
+}
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
+ settings:(ARDSettingsModel *)settings {
+ if (self = [super init]) {
+ _capturer = capturer;
+ _settings = settings;
+ _usingFrontCamera = YES;
+ }
+
+ return self;
+}
+
+- (void)startCapture {
+ [self startCapture:nil];
+}
+
+- (void)startCapture:(void (^)(NSError *))completion {
+ AVCaptureDevicePosition position =
+ _usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+ AVCaptureDevice *device = [self findDeviceForPosition:position];
+ AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
+
+ if (format == nil) {
+ RTCLogError(@"No valid formats for device %@", device);
+ NSAssert(NO, @"");
+
+ return;
+ }
+
+ NSInteger fps = [self selectFpsForFormat:format];
+
+ [_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion];
+}
+
+- (void)stopCapture {
+ [_capturer stopCapture];
+}
+
+- (void)switchCamera {
+ _usingFrontCamera = !_usingFrontCamera;
+ [self startCapture:nil];
+}
+
+- (void)switchCamera:(void (^)(NSError *))completion {
+ _usingFrontCamera = !_usingFrontCamera;
+ [self startCapture:completion];
+}
+
+#pragma mark - Private
+
+- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
+ NSArray<AVCaptureDevice *> *captureDevices =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
+ for (AVCaptureDevice *device in captureDevices) {
+ if (device.position == position) {
+ return device;
+ }
+ }
+ return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
+ NSArray<AVCaptureDeviceFormat *> *formats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
+ int targetWidth = [_settings currentVideoResolutionWidthFromStore];
+ int targetHeight = [_settings currentVideoResolutionHeightFromStore];
+ AVCaptureDeviceFormat *selectedFormat = nil;
+ int currentDiff = INT_MAX;
+
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+ if (diff < currentDiff) {
+ selectedFormat = format;
+ currentDiff = diff;
+ } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+ selectedFormat = format;
+ }
+ }
+
+ return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
+ Float64 maxSupportedFramerate = 0;
+ for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) {
+ maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+ }
+ return fmin(maxSupportedFramerate, kFramerateLimit);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h
new file mode 100644
index 0000000000..7c32c4b509
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/base/RTCVideoCapturer.h"
+
+@protocol ARDExternalSampleDelegate <NSObject>
+- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+@end
+
+@interface ARDExternalSampleCapturer : RTC_OBJC_TYPE(RTCVideoCapturer) <ARDExternalSampleDelegate>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m
new file mode 100644
index 0000000000..8bf6716ddb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDExternalSampleCapturer.h"
+
+#import "sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h"
+#import "sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h"
+#import "sdk/objc/base/RTCI420Buffer.h"
+#import "sdk/objc/base/RTCMutableI420Buffer.h"
+#import "sdk/objc/base/RTCMutableYUVPlanarBuffer.h"
+#import "sdk/objc/base/RTCVideoFrameBuffer.h"
+#import "sdk/objc/base/RTCYUVPlanarBuffer.h"
+#import "sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@implementation ARDExternalSampleCapturer
+
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ return [super initWithDelegate:delegate];
+}
+
+#pragma mark - ARDExternalSampleDelegate
+
+- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ return;
+ }
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (pixelBuffer == nil) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ int64_t timeStampNs =
+ CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:timeStampNs];
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+}
+
+@end
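
A hedged sketch of how sample buffers might be fed into ARDExternalSampleCapturer, for example from a ReplayKit broadcast upload extension (ReplayKit and the `capturer` property are assumptions for illustration; this diff does not set them up):

```objc
#import <ReplayKit/ReplayKit.h>
#import "ARDExternalSampleCapturer.h"

// Hypothetical broadcast handler; `capturer` would be created elsewhere with an
// RTCVideoCapturerDelegate (typically an RTCVideoSource) as its delegate.
@interface SampleHandler : RPBroadcastSampleHandler
@property(nonatomic, strong) ARDExternalSampleCapturer *capturer;
@end

@implementation SampleHandler
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
                   withType:(RPSampleBufferType)sampleBufferType {
  if (sampleBufferType == RPSampleBufferTypeVideo) {
    // ARDExternalSampleCapturer converts the CMSampleBuffer into an RTCVideoFrame
    // and forwards it to its delegate.
    [self.capturer didCaptureSampleBuffer:sampleBuffer];
  }
}
@end
```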
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h
new file mode 100644
index 0000000000..0edf7083c0
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse+Internal.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDJoinResponse.h"
+
+@interface ARDJoinResponse ()
+
+@property(nonatomic, assign) ARDJoinResultType result;
+@property(nonatomic, assign) BOOL isInitiator;
+@property(nonatomic, strong) NSString* roomId;
+@property(nonatomic, strong) NSString* clientId;
+@property(nonatomic, strong) NSArray* messages;
+@property(nonatomic, strong) NSURL* webSocketURL;
+@property(nonatomic, strong) NSURL* webSocketRestURL;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h
new file mode 100644
index 0000000000..2911202af1
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, ARDJoinResultType) {
+ kARDJoinResultTypeUnknown,
+ kARDJoinResultTypeSuccess,
+ kARDJoinResultTypeFull
+};
+
+// Result of joining a room on the room server.
+@interface ARDJoinResponse : NSObject
+
+@property(nonatomic, readonly) ARDJoinResultType result;
+@property(nonatomic, readonly) BOOL isInitiator;
+@property(nonatomic, readonly) NSString *roomId;
+@property(nonatomic, readonly) NSString *clientId;
+@property(nonatomic, readonly) NSArray *messages;
+@property(nonatomic, readonly) NSURL *webSocketURL;
+@property(nonatomic, readonly) NSURL *webSocketRestURL;
+
++ (ARDJoinResponse *)responseFromJSONData:(NSData *)data;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m
new file mode 100644
index 0000000000..87d58e0db1
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDJoinResponse.m
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDJoinResponse+Internal.h"
+
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+#import "RTCIceServer+JSON.h"
+
+static NSString const *kARDJoinResultKey = @"result";
+static NSString const *kARDJoinResultParamsKey = @"params";
+static NSString const *kARDJoinInitiatorKey = @"is_initiator";
+static NSString const *kARDJoinRoomIdKey = @"room_id";
+static NSString const *kARDJoinClientIdKey = @"client_id";
+static NSString const *kARDJoinMessagesKey = @"messages";
+static NSString const *kARDJoinWebSocketURLKey = @"wss_url";
+static NSString const *kARDJoinWebSocketRestURLKey = @"wss_post_url";
+
+@implementation ARDJoinResponse
+
+@synthesize result = _result;
+@synthesize isInitiator = _isInitiator;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+@synthesize messages = _messages;
+@synthesize webSocketURL = _webSocketURL;
+@synthesize webSocketRestURL = _webSocketRestURL;
+
++ (ARDJoinResponse *)responseFromJSONData:(NSData *)data {
+ NSDictionary *responseJSON = [NSDictionary dictionaryWithJSONData:data];
+ if (!responseJSON) {
+ return nil;
+ }
+ ARDJoinResponse *response = [[ARDJoinResponse alloc] init];
+ NSString *resultString = responseJSON[kARDJoinResultKey];
+ response.result = [[self class] resultTypeFromString:resultString];
+ NSDictionary *params = responseJSON[kARDJoinResultParamsKey];
+
+ response.isInitiator = [params[kARDJoinInitiatorKey] boolValue];
+ response.roomId = params[kARDJoinRoomIdKey];
+ response.clientId = params[kARDJoinClientIdKey];
+
+ // Parse messages.
+ NSArray *messages = params[kARDJoinMessagesKey];
+ NSMutableArray *signalingMessages =
+ [NSMutableArray arrayWithCapacity:messages.count];
+ for (NSString *message in messages) {
+ ARDSignalingMessage *signalingMessage =
+ [ARDSignalingMessage messageFromJSONString:message];
+ [signalingMessages addObject:signalingMessage];
+ }
+ response.messages = signalingMessages;
+
+ // Parse websocket urls.
+ NSString *webSocketURLString = params[kARDJoinWebSocketURLKey];
+ response.webSocketURL = [NSURL URLWithString:webSocketURLString];
+ NSString *webSocketRestURLString = params[kARDJoinWebSocketRestURLKey];
+ response.webSocketRestURL = [NSURL URLWithString:webSocketRestURLString];
+
+ return response;
+}
+
+#pragma mark - Private
+
++ (ARDJoinResultType)resultTypeFromString:(NSString *)resultString {
+ ARDJoinResultType result = kARDJoinResultTypeUnknown;
+ if ([resultString isEqualToString:@"SUCCESS"]) {
+ result = kARDJoinResultTypeSuccess;
+ } else if ([resultString isEqualToString:@"FULL"]) {
+ result = kARDJoinResultTypeFull;
+ }
+ return result;
+}
+
+@end
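
For reference, `responseFromJSONData:` expects a payload with the keys defined above (`result`, `params`, `is_initiator`, and so on). A small sketch, using illustrative values and placeholder URLs rather than a real server response:

```objc
#import "ARDJoinResponse.h"

static ARDJoinResponse *ParseExampleJoinResponse(void) {
  // Field values below are illustrative only.
  NSString *json =
      @"{\"result\":\"SUCCESS\","
       "\"params\":{\"is_initiator\":\"true\","
       "\"room_id\":\"123456789\",\"client_id\":\"11111111\","
       "\"messages\":[],"
       "\"wss_url\":\"wss://example.invalid/ws\","
       "\"wss_post_url\":\"https://example.invalid\"}}";
  // Yields result == kARDJoinResultTypeSuccess, isInitiator == YES,
  // an empty messages array, and the two parsed URLs.
  return [ARDJoinResponse
      responseFromJSONData:[json dataUsingEncoding:NSUTF8StringEncoding]];
}
```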
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h
new file mode 100644
index 0000000000..66ee76172f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse+Internal.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMessageResponse.h"
+
+@interface ARDMessageResponse ()
+
+@property(nonatomic, assign) ARDMessageResultType result;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h
new file mode 100644
index 0000000000..65468cdf78
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, ARDMessageResultType) {
+ kARDMessageResultTypeUnknown,
+ kARDMessageResultTypeSuccess,
+ kARDMessageResultTypeInvalidRoom,
+ kARDMessageResultTypeInvalidClient
+};
+
+@interface ARDMessageResponse : NSObject
+
+@property(nonatomic, readonly) ARDMessageResultType result;
+
++ (ARDMessageResponse *)responseFromJSONData:(NSData *)data;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m
new file mode 100644
index 0000000000..0f5383f6d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDMessageResponse.m
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMessageResponse+Internal.h"
+
+#import "ARDUtilities.h"
+
+static NSString const *kARDMessageResultKey = @"result";
+
+@implementation ARDMessageResponse
+
+@synthesize result = _result;
+
++ (ARDMessageResponse *)responseFromJSONData:(NSData *)data {
+ NSDictionary *responseJSON = [NSDictionary dictionaryWithJSONData:data];
+ if (!responseJSON) {
+ return nil;
+ }
+ ARDMessageResponse *response = [[ARDMessageResponse alloc] init];
+ response.result =
+ [[self class] resultTypeFromString:responseJSON[kARDMessageResultKey]];
+ return response;
+}
+
+#pragma mark - Private
+
++ (ARDMessageResultType)resultTypeFromString:(NSString *)resultString {
+ ARDMessageResultType result = kARDMessageResultTypeUnknown;
+ if ([resultString isEqualToString:@"SUCCESS"]) {
+ result = kARDMessageResultTypeSuccess;
+ } else if ([resultString isEqualToString:@"INVALID_CLIENT"]) {
+ result = kARDMessageResultTypeInvalidClient;
+ } else if ([resultString isEqualToString:@"INVALID_ROOM"]) {
+ result = kARDMessageResultTypeInvalidRoom;
+ }
+ return result;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h
new file mode 100644
index 0000000000..3a5818d6d6
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDRoomServerClient.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class ARDJoinResponse;
+@class ARDMessageResponse;
+@class ARDSignalingMessage;
+
+@protocol ARDRoomServerClient <NSObject>
+
+- (void)joinRoomWithRoomId:(NSString *)roomId
+ isLoopback:(BOOL)isLoopback
+ completionHandler:(void (^)(ARDJoinResponse *response, NSError *error))completionHandler;
+
+- (void)sendMessage:(ARDSignalingMessage *)message
+ forRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(ARDMessageResponse *response, NSError *error))completionHandler;
+
+- (void)leaveRoomWithRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ completionHandler:(void (^)(NSError *error))completionHandler;
+
+@end
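
A minimal sketch of how a caller might drive an object conforming to ARDRoomServerClient (ARDAppEngineClient in this diff is one such implementation); the room id and helper name are placeholders:

```objc
#import <Foundation/Foundation.h>

#import "ARDJoinResponse.h"
#import "ARDRoomServerClient.h"

// Hypothetical helper showing the join flow.
static void JoinRoom(id<ARDRoomServerClient> client, NSString *roomId) {
  [client joinRoomWithRoomId:roomId
                  isLoopback:NO
           completionHandler:^(ARDJoinResponse *response, NSError *error) {
             if (error) {
               NSLog(@"Join failed: %@", error);
               return;
             }
             NSLog(@"Joined room %@ as client %@ (initiator: %d)",
                   response.roomId, response.clientId, response.isInitiator);
           }];
}
```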
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h
new file mode 100644
index 0000000000..dc3f24ced8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel+Private.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSettingsModel.h"
+
+@class ARDSettingsStore;
+
+NS_ASSUME_NONNULL_BEGIN
+@interface ARDSettingsModel ()
+- (ARDSettingsStore *)settingsStore;
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h
new file mode 100644
index 0000000000..47c7defacd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCVideoCodecInfo.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Model class for user-defined settings.
+ *
+ * Handles storing the settings and provides default values if a setting is not
+ * set. Also provides the list of available options for each setting. Stored
+ * settings include, for example, video codec, video resolution and maximum bitrate.
+ */
+@interface ARDSettingsModel : NSObject
+
+/**
+ * Returns an array of available capture resolutions.
+ *
+ * The capture resolutions are represented as strings in the following format
+ * [width]x[height]
+ */
+- (NSArray<NSString *> *)availableVideoResolutions;
+
+/**
+ * Returns the current video resolution string.
+ * If no resolution has been stored, the default (the first available resolution)
+ * is returned; the default is registered in the store for consistency.
+ */
+- (NSString *)currentVideoResolutionSettingFromStore;
+- (int)currentVideoResolutionWidthFromStore;
+- (int)currentVideoResolutionHeightFromStore;
+
+/**
+ * Stores the provided video resolution string into the store.
+ *
+ * If the provided resolution is not part of the available video resolutions
+ * the store operation will not be executed and NO will be returned.
+ * @param resolution the string to be stored.
+ * @return YES/NO depending on success.
+ */
+- (BOOL)storeVideoResolutionSetting:(NSString *)resolution;
+
+/**
+ * Returns array of available video codecs.
+ */
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs;
+
+/**
+ * Returns current video codec setting from store if present or default (H264) otherwise.
+ */
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore;
+
+/**
+ * Stores the provided video codec setting into the store.
+ *
+ * If the provided video codec is not part of the available video codecs
+ * the store operation will not be executed and NO will be returned.
+ * @param videoCodec the codec info to be stored.
+ * @return YES/NO depending on success.
+ */
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec;
+
+/**
+ * Returns current max bitrate setting from store if present.
+ */
+- (nullable NSNumber *)currentMaxBitrateSettingFromStore;
+
+/**
+ * Stores the provided bitrate value into the store.
+ *
+ * @param bitrate NSNumber representation of the max bitrate value.
+ */
+- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate;
+
+/**
+ * Returns current audio only setting from store if present or default (NO) otherwise.
+ */
+- (BOOL)currentAudioOnlySettingFromStore;
+
+/**
+ * Stores the provided audio only setting into the store.
+ *
+ * @param audioOnly the boolean value to be stored.
+ */
+- (void)storeAudioOnlySetting:(BOOL)audioOnly;
+
+/**
+ * Returns current create AecDump setting from store if present or default (NO) otherwise.
+ */
+- (BOOL)currentCreateAecDumpSettingFromStore;
+
+/**
+ * Stores the provided create AecDump setting into the store.
+ *
+ * @param createAecDump the boolean value to be stored.
+ */
+- (void)storeCreateAecDumpSetting:(BOOL)createAecDump;
+
+/**
+ * Returns the current setting for whether to use manual audio config from the
+ * store if present, or the default (YES) otherwise.
+ */
+- (BOOL)currentUseManualAudioConfigSettingFromStore;
+
+/**
+ * Stores the provided use manual audio config setting into the store.
+ *
+ * @param useManualAudioConfig the boolean value to be stored.
+ */
+- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig;
+
+@end
+NS_ASSUME_NONNULL_END
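
A short sketch of reading and writing settings through ARDSettingsModel; the resolution string is illustrative and is only accepted if it appears in availableVideoResolutions:

```objc
#import <Foundation/Foundation.h>

#import "ARDSettingsModel.h"

// Hypothetical helper demonstrating the settings API.
static void ConfigureSettings(void) {
  ARDSettingsModel *model = [[ARDSettingsModel alloc] init];
  // Reads fall back to registered defaults when nothing has been stored yet.
  NSString *resolution = [model currentVideoResolutionSettingFromStore];
  NSNumber *maxBitrate = [model currentMaxBitrateSettingFromStore];
  NSLog(@"resolution=%@ maxBitrate=%@", resolution, maxBitrate);

  // Writes are validated against the available options.
  if (![model storeVideoResolutionSetting:@"640x480"]) {
    NSLog(@"640x480 is not offered by any capture device on this hardware.");
  }
  [model storeMaxBitrateSetting:@(1000)];
  [model storeAudioOnlySetting:NO];
}
```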
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m
new file mode 100644
index 0000000000..9e709b0553
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsModel.m
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsModel+Private.h"
+#import "ARDSettingsStore.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface ARDSettingsModel () {
+ ARDSettingsStore *_settingsStore;
+}
+@end
+
+@implementation ARDSettingsModel
+
+- (NSArray<NSString *> *)availableVideoResolutions {
+ NSMutableSet<NSArray<NSNumber *> *> *resolutions =
+ [[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
+ for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
+ for (AVCaptureDeviceFormat *format in
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
+ CMVideoDimensions resolution =
+ CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
+ [resolutions addObject:resolutionObject];
+ }
+ }
+
+ NSArray<NSArray<NSNumber *> *> *sortedResolutions =
+ [[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult(
+ NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
+ NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject];
+ if (cmp != NSOrderedSame) {
+ return cmp;
+ }
+ return [obj1.lastObject compare:obj2.lastObject];
+ }];
+
+ NSMutableArray<NSString *> *resolutionStrings = [[NSMutableArray<NSString *> alloc] init];
+ for (NSArray<NSNumber *> *resolution in sortedResolutions) {
+ NSString *resolutionString =
+ [NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject];
+ [resolutionStrings addObject:resolutionString];
+ }
+
+ return [resolutionStrings copy];
+}
+
+- (NSString *)currentVideoResolutionSettingFromStore {
+ [self registerStoreDefaults];
+ return [[self settingsStore] videoResolution];
+}
+
+- (BOOL)storeVideoResolutionSetting:(NSString *)resolution {
+ if (![[self availableVideoResolutions] containsObject:resolution]) {
+ return NO;
+ }
+ [[self settingsStore] setVideoResolution:resolution];
+ return YES;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs {
+ return [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore {
+ [self registerStoreDefaults];
+ NSData *codecData = [[self settingsStore] videoCodec];
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+ Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class];
+ NSError *error;
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting =
+ [NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass fromData:codecData error:&error];
+ if (!error) {
+ return videoCodecSetting;
+ }
+ return nil;
+#else
+ return [NSKeyedUnarchiver unarchiveObjectWithData:codecData];
+#endif
+}
+
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec {
+ if (![[self availableVideoCodecs] containsObject:videoCodec]) {
+ return NO;
+ }
+
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+ NSError *error;
+ NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:videoCodec
+ requiringSecureCoding:NO
+ error:&error];
+ if (error) {
+ return NO;
+ }
+#else
+ NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:videoCodec];
+#endif
+
+ [[self settingsStore] setVideoCodec:codecData];
+ return YES;
+}
+
+- (nullable NSNumber *)currentMaxBitrateSettingFromStore {
+ [self registerStoreDefaults];
+ return [[self settingsStore] maxBitrate];
+}
+
+- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate {
+ [[self settingsStore] setMaxBitrate:bitrate];
+}
+
+- (BOOL)currentAudioOnlySettingFromStore {
+ return [[self settingsStore] audioOnly];
+}
+
+- (void)storeAudioOnlySetting:(BOOL)audioOnly {
+ [[self settingsStore] setAudioOnly:audioOnly];
+}
+
+- (BOOL)currentCreateAecDumpSettingFromStore {
+ return [[self settingsStore] createAecDump];
+}
+
+- (void)storeCreateAecDumpSetting:(BOOL)createAecDump {
+ [[self settingsStore] setCreateAecDump:createAecDump];
+}
+
+- (BOOL)currentUseManualAudioConfigSettingFromStore {
+ return [[self settingsStore] useManualAudioConfig];
+}
+
+- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig {
+ [[self settingsStore] setUseManualAudioConfig:useManualAudioConfig];
+}
+
+#pragma mark - Testable
+
+- (ARDSettingsStore *)settingsStore {
+ if (!_settingsStore) {
+ _settingsStore = [[ARDSettingsStore alloc] init];
+ [self registerStoreDefaults];
+ }
+ return _settingsStore;
+}
+
+- (int)currentVideoResolutionWidthFromStore {
+ NSString *resolution = [self currentVideoResolutionSettingFromStore];
+
+ return [self videoResolutionComponentAtIndex:0 inString:resolution];
+}
+
+- (int)currentVideoResolutionHeightFromStore {
+ NSString *resolution = [self currentVideoResolutionSettingFromStore];
+ return [self videoResolutionComponentAtIndex:1 inString:resolution];
+}
+
+#pragma mark -
+
+- (NSString *)defaultVideoResolutionSetting {
+ return [self availableVideoResolutions].firstObject;
+}
+
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)defaultVideoCodecSetting {
+ return [self availableVideoCodecs].firstObject;
+}
+
+- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution {
+ if (index != 0 && index != 1) {
+ return 0;
+ }
+ NSArray<NSString *> *components = [resolution componentsSeparatedByString:@"x"];
+ if (components.count != 2) {
+ return 0;
+ }
+ return components[index].intValue;
+}
+
+- (void)registerStoreDefaults {
+#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
+ NSError *error;
+ NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]
+ requiringSecureCoding:NO
+ error:&error];
+ if (error) {
+ return;
+ }
+#else
+ NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
+#endif
+
+ [ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
+ videoCodec:codecData
+ bitrate:nil
+ audioOnly:NO
+ createAecDump:NO
+ useManualAudioConfig:YES];
+}
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h
new file mode 100644
index 0000000000..bb051dbb26
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Light-weight persistent store for user settings.
+ *
+ * It will persist between application launches and application updates.
+ */
+@interface ARDSettingsStore : NSObject
+
+/**
+ * Registers fallback values used when a setting has not been written by the user.
+ * The provided values are registered as defaults in NSUserDefaults.
+ */
++ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
+ videoCodec:(NSData *)videoCodec
+ bitrate:(nullable NSNumber *)bitrate
+ audioOnly:(BOOL)audioOnly
+ createAecDump:(BOOL)createAecDump
+ useManualAudioConfig:(BOOL)useManualAudioConfig;
+
+@property(nonatomic) NSString *videoResolution;
+@property(nonatomic) NSData *videoCodec;
+
+/**
+ * Returns current max bitrate number stored in the store.
+ */
+- (nullable NSNumber *)maxBitrate;
+
+/**
+ * Stores the provided value as maximum bitrate setting.
+ * @param value the number to be stored
+ */
+- (void)setMaxBitrate:(nullable NSNumber *)value;
+
+@property(nonatomic) BOOL audioOnly;
+@property(nonatomic) BOOL createAecDump;
+@property(nonatomic) BOOL useManualAudioConfig;
+
+@end
+NS_ASSUME_NONNULL_END
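
The store is a thin wrapper around NSUserDefaults, as the implementation below shows. A hedged sketch of how registered defaults interact with explicit writes (the codec data argument is assumed to be an archived RTCVideoCodecInfo produced by the caller):

```objc
#import <Foundation/Foundation.h>

#import "ARDSettingsStore.h"

// Hypothetical helper; `codecData` is an NSKeyedArchiver-encoded codec info.
static void DemoSettingsStore(NSData *codecData) {
  // Register fallbacks; these do not overwrite values the user has already set.
  [ARDSettingsStore setDefaultsForVideoResolution:@"640x480"
                                       videoCodec:codecData
                                          bitrate:nil
                                        audioOnly:NO
                                    createAecDump:NO
                             useManualAudioConfig:YES];
  ARDSettingsStore *store = [[ARDSettingsStore alloc] init];
  // Explicit writes go to NSUserDefaults and persist across launches.
  store.audioOnly = YES;
  NSLog(@"audioOnly=%d resolution=%@", store.audioOnly, store.videoResolution);
}
```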
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m
new file mode 100644
index 0000000000..a3713e2f0e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSettingsStore.m
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsStore.h"
+
+static NSString *const kVideoResolutionKey = @"rtc_video_resolution_key";
+static NSString *const kVideoCodecKey = @"rtc_video_codec_info_key";
+static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
+static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
+static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
+static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key";
+
+NS_ASSUME_NONNULL_BEGIN
+@interface ARDSettingsStore () {
+ NSUserDefaults *_storage;
+}
+@property(nonatomic, strong, readonly) NSUserDefaults *storage;
+@end
+
+@implementation ARDSettingsStore
+
++ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
+ videoCodec:(NSData *)videoCodec
+ bitrate:(nullable NSNumber *)bitrate
+ audioOnly:(BOOL)audioOnly
+ createAecDump:(BOOL)createAecDump
+ useManualAudioConfig:(BOOL)useManualAudioConfig {
+ NSMutableDictionary<NSString *, id> *defaultsDictionary = [@{
+ kAudioOnlyKey : @(audioOnly),
+ kCreateAecDumpKey : @(createAecDump),
+ kUseManualAudioConfigKey : @(useManualAudioConfig)
+ } mutableCopy];
+
+ if (videoResolution) {
+ defaultsDictionary[kVideoResolutionKey] = videoResolution;
+ }
+ if (videoCodec) {
+ defaultsDictionary[kVideoCodecKey] = videoCodec;
+ }
+ if (bitrate) {
+ defaultsDictionary[kBitrateKey] = bitrate;
+ }
+ [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary];
+}
+
+- (NSUserDefaults *)storage {
+ if (!_storage) {
+ _storage = [NSUserDefaults standardUserDefaults];
+ }
+ return _storage;
+}
+
+- (NSString *)videoResolution {
+ return [self.storage objectForKey:kVideoResolutionKey];
+}
+
+- (void)setVideoResolution:(NSString *)resolution {
+ [self.storage setObject:resolution forKey:kVideoResolutionKey];
+ [self.storage synchronize];
+}
+
+- (NSData *)videoCodec {
+ return [self.storage objectForKey:kVideoCodecKey];
+}
+
+- (void)setVideoCodec:(NSData *)videoCodec {
+ [self.storage setObject:videoCodec forKey:kVideoCodecKey];
+ [self.storage synchronize];
+}
+
+- (nullable NSNumber *)maxBitrate {
+ return [self.storage objectForKey:kBitrateKey];
+}
+
+- (void)setMaxBitrate:(nullable NSNumber *)value {
+ [self.storage setObject:value forKey:kBitrateKey];
+ [self.storage synchronize];
+}
+
+- (BOOL)audioOnly {
+ return [self.storage boolForKey:kAudioOnlyKey];
+}
+
+- (void)setAudioOnly:(BOOL)audioOnly {
+ [self.storage setBool:audioOnly forKey:kAudioOnlyKey];
+ [self.storage synchronize];
+}
+
+- (BOOL)createAecDump {
+ return [self.storage boolForKey:kCreateAecDumpKey];
+}
+
+- (void)setCreateAecDump:(BOOL)createAecDump {
+ [self.storage setBool:createAecDump forKey:kCreateAecDumpKey];
+ [self.storage synchronize];
+}
+
+- (BOOL)useManualAudioConfig {
+ return [self.storage boolForKey:kUseManualAudioConfigKey];
+}
+
+- (void)setUseManualAudioConfig:(BOOL)useManualAudioConfig {
+ [self.storage setBool:useManualAudioConfig forKey:kUseManualAudioConfigKey];
+ [self.storage synchronize];
+}
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h
new file mode 100644
index 0000000000..396b117b17
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingChannel.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSignalingMessage.h"
+
+typedef NS_ENUM(NSInteger, ARDSignalingChannelState) {
+ // State when disconnected.
+ kARDSignalingChannelStateClosed,
+ // State when connection is established but not ready for use.
+ kARDSignalingChannelStateOpen,
+ // State when connection is established and registered.
+ kARDSignalingChannelStateRegistered,
+ // State when connection encounters a fatal error.
+ kARDSignalingChannelStateError
+};
+
+@protocol ARDSignalingChannel;
+@protocol ARDSignalingChannelDelegate <NSObject>
+
+- (void)channel:(id<ARDSignalingChannel>)channel didChangeState:(ARDSignalingChannelState)state;
+
+- (void)channel:(id<ARDSignalingChannel>)channel didReceiveMessage:(ARDSignalingMessage *)message;
+
+@end
+
+@protocol ARDSignalingChannel <NSObject>
+
+@property(nonatomic, readonly) NSString *roomId;
+@property(nonatomic, readonly) NSString *clientId;
+@property(nonatomic, readonly) ARDSignalingChannelState state;
+@property(nonatomic, weak) id<ARDSignalingChannelDelegate> delegate;
+
+// Registers the channel for the given room and client id.
+- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId;
+
+// Sends signaling message over the channel.
+- (void)sendMessage:(ARDSignalingMessage *)message;
+
+@end
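
A sketch of a delegate that reacts to channel state changes and incoming messages; what a real client does with the messages (feeding offers, answers and candidates to its peer connection) is outside this header, and the class name here is hypothetical:

```objc
#import <Foundation/Foundation.h>

#import "ARDSignalingChannel.h"

// Hypothetical delegate implementation.
@interface SignalingObserver : NSObject <ARDSignalingChannelDelegate>
@end

@implementation SignalingObserver

- (void)channel:(id<ARDSignalingChannel>)channel
    didChangeState:(ARDSignalingChannelState)state {
  if (state == kARDSignalingChannelStateRegistered) {
    NSLog(@"Registered for room %@ as %@", channel.roomId, channel.clientId);
  }
}

- (void)channel:(id<ARDSignalingChannel>)channel
    didReceiveMessage:(ARDSignalingMessage *)message {
  // A real client would hand the message to its peer connection logic.
  NSLog(@"Received signaling message of type %d", (int)message.type);
}

@end
```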
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h
new file mode 100644
index 0000000000..ac19e8fba7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCIceCandidate.h"
+#import "sdk/objc/api/peerconnection/RTCSessionDescription.h"
+
+typedef enum {
+ kARDSignalingMessageTypeCandidate,
+ kARDSignalingMessageTypeCandidateRemoval,
+ kARDSignalingMessageTypeOffer,
+ kARDSignalingMessageTypeAnswer,
+ kARDSignalingMessageTypeBye,
+} ARDSignalingMessageType;
+
+@interface ARDSignalingMessage : NSObject
+
+@property(nonatomic, readonly) ARDSignalingMessageType type;
+
++ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString;
+- (NSData *)JSONData;
+
+@end
+
+@interface ARDICECandidateMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceCandidate) * candidate;
+
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
+
+@end
+
+@interface ARDICECandidateRemovalMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
+
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+@end
+
+@interface ARDSessionDescriptionMessage : ARDSignalingMessage
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription;
+
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description;
+
+@end
+
+@interface ARDByeMessage : ARDSignalingMessage
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m
new file mode 100644
index 0000000000..049c0f5b0a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDSignalingMessage.m
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSignalingMessage.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDUtilities.h"
+#import "RTCIceCandidate+JSON.h"
+#import "RTCSessionDescription+JSON.h"
+
+static NSString * const kARDSignalingMessageTypeKey = @"type";
+static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
+
+@implementation ARDSignalingMessage
+
+@synthesize type = _type;
+
+- (instancetype)initWithType:(ARDSignalingMessageType)type {
+ if (self = [super init]) {
+ _type = type;
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [[NSString alloc] initWithData:[self JSONData]
+ encoding:NSUTF8StringEncoding];
+}
+
++ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString {
+ NSDictionary *values = [NSDictionary dictionaryWithJSONString:jsonString];
+ if (!values) {
+ RTCLogError(@"Error parsing signaling message JSON.");
+ return nil;
+ }
+
+ NSString *typeString = values[kARDSignalingMessageTypeKey];
+ ARDSignalingMessage *message = nil;
+ if ([typeString isEqualToString:@"candidate"]) {
+ RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+ [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:values];
+ message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
+ } else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) {
+ RTCLogInfo(@"Received remove-candidates message");
+ NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
+ [RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values];
+ message = [[ARDICECandidateRemovalMessage alloc]
+ initWithRemovedCandidates:candidates];
+ } else if ([typeString isEqualToString:@"offer"] ||
+ [typeString isEqualToString:@"answer"]) {
+ RTC_OBJC_TYPE(RTCSessionDescription) *description =
+ [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
+ message =
+ [[ARDSessionDescriptionMessage alloc] initWithDescription:description];
+ } else if ([typeString isEqualToString:@"bye"]) {
+ message = [[ARDByeMessage alloc] init];
+ } else {
+ RTCLogError(@"Unexpected type: %@", typeString);
+ }
+ return message;
+}
+
+- (NSData *)JSONData {
+ return nil;
+}
+
+@end
+
+@implementation ARDICECandidateMessage
+
+@synthesize candidate = _candidate;
+
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+ if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
+ _candidate = candidate;
+ }
+ return self;
+}
+
+- (NSData *)JSONData {
+ return [_candidate JSONData];
+}
+
+@end
+
+@implementation ARDICECandidateRemovalMessage
+
+@synthesize candidates = _candidates;
+
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
+ NSParameterAssert(candidates.count);
+ if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
+ _candidates = candidates;
+ }
+ return self;
+}
+
+- (NSData *)JSONData {
+ return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
+ withType:kARDTypeValueRemoveCandidates];
+}
+
+@end
+
+@implementation ARDSessionDescriptionMessage
+
+@synthesize sessionDescription = _sessionDescription;
+
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
+ ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
+ RTCSdpType sdpType = description.type;
+ switch (sdpType) {
+ case RTCSdpTypeOffer:
+ messageType = kARDSignalingMessageTypeOffer;
+ break;
+ case RTCSdpTypeAnswer:
+ messageType = kARDSignalingMessageTypeAnswer;
+ break;
+ case RTCSdpTypePrAnswer:
+ case RTCSdpTypeRollback:
+ NSAssert(
+ NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
+ break;
+ }
+ if (self = [super initWithType:messageType]) {
+ _sessionDescription = description;
+ }
+ return self;
+}
+
+- (NSData *)JSONData {
+ return [_sessionDescription JSONData];
+}
+
+@end
+
+@implementation ARDByeMessage
+
+- (instancetype)init {
+ return [super initWithType:kARDSignalingMessageTypeBye];
+}
+
+- (NSData *)JSONData {
+ NSDictionary *message = @{
+ @"type": @"bye"
+ };
+ return [NSJSONSerialization dataWithJSONObject:message
+ options:NSJSONWritingPrettyPrinted
+ error:NULL];
+}
+
+@end
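
As the parser above shows, messages are distinguished by their "type" field. A bye message, for example, round-trips through JSONData and messageFromJSONString: like this (the helper name is illustrative):

```objc
#import <Foundation/Foundation.h>

#import "ARDSignalingMessage.h"

// Hypothetical round-trip demonstration.
static void RoundTripByeMessage(void) {
  ARDByeMessage *bye = [[ARDByeMessage alloc] init];
  NSString *json = [[NSString alloc] initWithData:[bye JSONData]
                                         encoding:NSUTF8StringEncoding];
  // json contains {"type": "bye"} (pretty-printed by NSJSONSerialization).
  ARDSignalingMessage *parsed = [ARDSignalingMessage messageFromJSONString:json];
  NSLog(@"parsed type: %d", (int)parsed.type);  // kARDSignalingMessageTypeBye
}
```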
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h
new file mode 100644
index 0000000000..eaffa67049
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCStatisticsReport.h"
+#import "sdk/objc/base/RTCMacros.h"
+
+/** Class used to accumulate stats information into a single displayable string.
+ */
+@interface ARDStatsBuilder : NSObject
+
+/** String that represents the accumulated stats reports passed into this
+ * class.
+ */
+@property(nonatomic, readonly) NSString *statsString;
+@property(nonatomic) RTC_OBJC_TYPE(RTCStatisticsReport) * stats;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m
new file mode 100644
index 0000000000..7ebf9fb1c7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDStatsBuilder.m
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDStatsBuilder.h"
+
+#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
+#import "sdk/objc/base/RTCMacros.h"
+
+#import "ARDUtilities.h"
+
+@implementation ARDStatsBuilder
+
+@synthesize stats = _stats;
+
+- (NSString *)statsString {
+ NSMutableString *result = [NSMutableString string];
+
+ [result appendFormat:@"(cpu)%ld%%\n", (long)ARDGetCpuUsagePercentage()];
+
+ for (NSString *key in _stats.statistics) {
+ RTC_OBJC_TYPE(RTCStatistics) *stat = _stats.statistics[key];
+ [result appendFormat:@"%@\n", stat.description];
+ }
+
+ return result;
+}
+
+@end
+
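
Usage is intentionally simple: assign the latest statistics report (obtained from the peer connection's stats callback elsewhere in the app) and read statsString. A minimal sketch:

```objc
#import <Foundation/Foundation.h>

#import "ARDStatsBuilder.h"

// Hypothetical: `report` comes from the peer connection's statistics callback.
static NSString *FormatStats(RTC_OBJC_TYPE(RTCStatisticsReport) *report) {
  ARDStatsBuilder *builder = [[ARDStatsBuilder alloc] init];
  builder.stats = report;
  // CPU usage line followed by one line per RTCStatistics entry.
  return builder.statsString;
}
```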
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h
new file mode 100644
index 0000000000..3a579f8f7f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient+Internal.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDTURNClient.h"
+
+@interface ARDTURNClient : NSObject <ARDTURNClient>
+
+- (instancetype)initWithURL:(NSURL *)url;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h
new file mode 100644
index 0000000000..0399736f03
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCIceServer);
+
+@protocol ARDTURNClient <NSObject>
+
+// Requests TURN servers and passes them (or an error) to the completion handler.
+- (void)requestServersWithCompletionHandler:(void (^)(NSArray *turnServers,
+ NSError *error))completionHandler;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m
new file mode 100644
index 0000000000..069231cd7e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDTURNClient.m
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDTURNClient+Internal.h"
+
+#import "ARDUtilities.h"
+#import "RTCIceServer+JSON.h"
+
+// TODO(tkchin): move this to a configuration object.
+static NSString *kTURNRefererURLString = @"https://appr.tc";
+static NSString *kARDTURNClientErrorDomain = @"ARDTURNClient";
+static NSInteger kARDTURNClientErrorBadResponse = -1;
+
+@implementation ARDTURNClient {
+ NSURL *_url;
+}
+
+- (instancetype)initWithURL:(NSURL *)url {
+ NSParameterAssert([url absoluteString].length);
+ if (self = [super init]) {
+ _url = url;
+ }
+ return self;
+}
+
+- (void)requestServersWithCompletionHandler:
+ (void (^)(NSArray *turnServers, NSError *error))completionHandler {
+
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
+ [NSURLConnection sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
+ if (error) {
+ completionHandler(nil, error);
+ return;
+ }
+ NSDictionary *responseDict = [NSDictionary dictionaryWithJSONData:data];
+ NSString *iceServerUrl = responseDict[@"ice_server_url"];
+ [self makeTurnServerRequestToURL:[NSURL URLWithString:iceServerUrl]
+ WithCompletionHandler:completionHandler];
+ }];
+}
+
+#pragma mark - Private
+
+- (void)makeTurnServerRequestToURL:(NSURL *)url
+ WithCompletionHandler:(void (^)(NSArray *turnServers,
+ NSError *error))completionHandler {
+ NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url];
+ iceServerRequest.HTTPMethod = @"POST";
+ [iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"];
+ [NSURLConnection sendAsyncRequest:iceServerRequest
+ completionHandler:^(NSURLResponse *response,
+ NSData *data,
+ NSError *error) {
+ if (error) {
+ completionHandler(nil, error);
+ return;
+ }
+ NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
+ NSMutableArray *turnServers = [NSMutableArray array];
+ [turnResponseDict[@"iceServers"]
+ enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) {
+ [turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
+ }];
+ if (!turnServers) {
+ NSError *responseError =
+ [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
+ code:kARDTURNClientErrorBadResponse
+ userInfo:@{
+ NSLocalizedDescriptionKey: @"Bad TURN response.",
+ }];
+ completionHandler(nil, responseError);
+ return;
+ }
+ completionHandler(turnServers, nil);
+ }];
+}
+
+@end
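
The client performs two requests: a GET to the configured URL, whose response supplies an "ice_server_url", followed by a POST to that URL whose "iceServers" array is converted into RTCIceServer objects. A hedged usage sketch with a placeholder request URL:

```objc
#import <Foundation/Foundation.h>

#import "ARDTURNClient+Internal.h"

// Hypothetical helper; the URL is a placeholder, not a real ICE config endpoint.
static void FetchTurnServers(void) {
  NSURL *url = [NSURL URLWithString:@"https://example.invalid/iceconfig"];
  ARDTURNClient *client = [[ARDTURNClient alloc] initWithURL:url];
  [client requestServersWithCompletionHandler:^(NSArray *turnServers, NSError *error) {
    if (error) {
      NSLog(@"TURN request failed: %@", error);
      return;
    }
    NSLog(@"Received %lu ICE servers", (unsigned long)turnServers.count);
  }];
}
```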
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h
new file mode 100644
index 0000000000..81888e6e83
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "ARDSignalingChannel.h"
+
+// Wraps a WebSocket connection to the AppRTC WebSocket server.
+@interface ARDWebSocketChannel : NSObject <ARDSignalingChannel>
+
+- (instancetype)initWithURL:(NSURL *)url
+ restURL:(NSURL *)restURL
+ delegate:(id<ARDSignalingChannelDelegate>)delegate;
+
+// Registers with the WebSocket server for the given room and client id once
+// the web socket connection is open.
+- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId;
+
+// Sends a message over the WebSocket connection if registered; otherwise POSTs it
+// to the WebSocket server.
+- (void)sendMessage:(ARDSignalingMessage *)message;
+
+@end
+
+// Loopback mode is used to cause the client to connect to itself for testing.
+// A second web socket connection is established simulating the other client.
+// Any messages received are sent back to the WebSocket server after modifying
+// them as appropriate.
+@interface ARDLoopbackWebSocketChannel : ARDWebSocketChannel
+
+- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m
new file mode 100644
index 0000000000..bbb0bf87f8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ARDWebSocketChannel.m
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDWebSocketChannel.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "SRWebSocket.h"
+
+#import "ARDSignalingMessage.h"
+#import "ARDUtilities.h"
+
+// TODO(tkchin): move these to a configuration object.
+static NSString const *kARDWSSMessageErrorKey = @"error";
+static NSString const *kARDWSSMessagePayloadKey = @"msg";
+
+@interface ARDWebSocketChannel () <SRWebSocketDelegate>
+@end
+
+@implementation ARDWebSocketChannel {
+ NSURL *_url;
+ NSURL *_restURL;
+ SRWebSocket *_socket;
+}
+
+@synthesize delegate = _delegate;
+@synthesize state = _state;
+@synthesize roomId = _roomId;
+@synthesize clientId = _clientId;
+
+- (instancetype)initWithURL:(NSURL *)url
+ restURL:(NSURL *)restURL
+ delegate:(id<ARDSignalingChannelDelegate>)delegate {
+ if (self = [super init]) {
+ _url = url;
+ _restURL = restURL;
+ _delegate = delegate;
+ _socket = [[SRWebSocket alloc] initWithURL:url];
+ _socket.delegate = self;
+ RTCLog(@"Opening WebSocket.");
+ [_socket open];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self disconnect];
+}
+
+- (void)setState:(ARDSignalingChannelState)state {
+ if (_state == state) {
+ return;
+ }
+ _state = state;
+ [_delegate channel:self didChangeState:_state];
+}
+
+- (void)registerForRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId {
+ NSParameterAssert(roomId.length);
+ NSParameterAssert(clientId.length);
+ _roomId = roomId;
+ _clientId = clientId;
+ if (_state == kARDSignalingChannelStateOpen) {
+ [self registerWithCollider];
+ }
+}
+
+- (void)sendMessage:(ARDSignalingMessage *)message {
+ NSParameterAssert(_clientId.length);
+ NSParameterAssert(_roomId.length);
+ NSData *data = [message JSONData];
+ if (_state == kARDSignalingChannelStateRegistered) {
+ NSString *payload =
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
+ NSDictionary *message = @{
+ @"cmd": @"send",
+ @"msg": payload,
+ };
+ NSData *messageJSONObject =
+ [NSJSONSerialization dataWithJSONObject:message
+ options:NSJSONWritingPrettyPrinted
+ error:nil];
+ NSString *messageString =
+ [[NSString alloc] initWithData:messageJSONObject
+ encoding:NSUTF8StringEncoding];
+ RTCLog(@"C->WSS: %@", messageString);
+ [_socket send:messageString];
+ } else {
+ NSString *dataString =
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
+ RTCLog(@"C->WSS POST: %@", dataString);
+ NSString *urlString =
+ [NSString stringWithFormat:@"%@/%@/%@",
+ [_restURL absoluteString], _roomId, _clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ [NSURLConnection sendAsyncPostToURL:url
+ withData:data
+ completionHandler:nil];
+ }
+}
+
+- (void)disconnect {
+ if (_state == kARDSignalingChannelStateClosed ||
+ _state == kARDSignalingChannelStateError) {
+ return;
+ }
+ [_socket close];
+ RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
+ NSString *urlString =
+ [NSString stringWithFormat:@"%@/%@/%@",
+ [_restURL absoluteString], _roomId, _clientId];
+ NSURL *url = [NSURL URLWithString:urlString];
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"DELETE";
+ request.HTTPBody = nil;
+ [NSURLConnection sendAsyncRequest:request completionHandler:nil];
+}
+
+#pragma mark - SRWebSocketDelegate
+
+- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
+ RTCLog(@"WebSocket connection opened.");
+ self.state = kARDSignalingChannelStateOpen;
+ if (_roomId.length && _clientId.length) {
+ [self registerWithCollider];
+ }
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
+ NSString *messageString = message;
+ NSData *messageData = [messageString dataUsingEncoding:NSUTF8StringEncoding];
+ id jsonObject = [NSJSONSerialization JSONObjectWithData:messageData
+ options:0
+ error:nil];
+ if (![jsonObject isKindOfClass:[NSDictionary class]]) {
+ RTCLogError(@"Unexpected message: %@", jsonObject);
+ return;
+ }
+ NSDictionary *wssMessage = jsonObject;
+ NSString *errorString = wssMessage[kARDWSSMessageErrorKey];
+ if (errorString.length) {
+ RTCLogError(@"WSS error: %@", errorString);
+ return;
+ }
+ NSString *payload = wssMessage[kARDWSSMessagePayloadKey];
+ ARDSignalingMessage *signalingMessage =
+ [ARDSignalingMessage messageFromJSONString:payload];
+ RTCLog(@"WSS->C: %@", payload);
+ [_delegate channel:self didReceiveMessage:signalingMessage];
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
+ RTCLogError(@"WebSocket error: %@", error);
+ self.state = kARDSignalingChannelStateError;
+}
+
+- (void)webSocket:(SRWebSocket *)webSocket
+ didCloseWithCode:(NSInteger)code
+ reason:(NSString *)reason
+ wasClean:(BOOL)wasClean {
+ RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
+ (long)code, reason, wasClean);
+ NSParameterAssert(_state != kARDSignalingChannelStateError);
+ self.state = kARDSignalingChannelStateClosed;
+}
+
+#pragma mark - Private
+
+- (void)registerWithCollider {
+ if (_state == kARDSignalingChannelStateRegistered) {
+ return;
+ }
+ NSParameterAssert(_roomId.length);
+ NSParameterAssert(_clientId.length);
+ NSDictionary *registerMessage = @{
+ @"cmd": @"register",
+ @"roomid" : _roomId,
+ @"clientid" : _clientId,
+ };
+ NSData *message =
+ [NSJSONSerialization dataWithJSONObject:registerMessage
+ options:NSJSONWritingPrettyPrinted
+ error:nil];
+ NSString *messageString =
+ [[NSString alloc] initWithData:message encoding:NSUTF8StringEncoding];
+ RTCLog(@"Registering on WSS for rid:%@ cid:%@", _roomId, _clientId);
+ // Registration can fail if server rejects it. For example, if the room is
+ // full.
+ [_socket send:messageString];
+ self.state = kARDSignalingChannelStateRegistered;
+}
+
+@end
+
+@interface ARDLoopbackWebSocketChannel () <ARDSignalingChannelDelegate>
+@end
+
+@implementation ARDLoopbackWebSocketChannel
+
+- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL {
+ return [super initWithURL:url restURL:restURL delegate:self];
+}
+
+#pragma mark - ARDSignalingChannelDelegate
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didReceiveMessage:(ARDSignalingMessage *)message {
+ switch (message.type) {
+ case kARDSignalingMessageTypeOffer: {
+ // Change message to answer, send back to server.
+ ARDSessionDescriptionMessage *sdpMessage =
+ (ARDSessionDescriptionMessage *)message;
+ RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
+ NSString *dsc = description.sdp;
+ dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
+ withString:@"answer"];
+ RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
+ ARDSignalingMessage *answer =
+ [[ARDSessionDescriptionMessage alloc]
+ initWithDescription:answerDescription];
+ [self sendMessage:answer];
+ break;
+ }
+ case kARDSignalingMessageTypeAnswer:
+ // Should not receive answer in loopback scenario.
+ break;
+ case kARDSignalingMessageTypeCandidate:
+ case kARDSignalingMessageTypeCandidateRemoval:
+ // Send back to server.
+ [self sendMessage:message];
+ break;
+ case kARDSignalingMessageTypeBye:
+ // Nothing to do.
+ return;
+ }
+}
+
+- (void)channel:(id<ARDSignalingChannel>)channel
+ didChangeState:(ARDSignalingChannelState)state {
+}
+
+@end
+
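For reference, a sketch of the collider wire format implied by the implementation above; the field names come straight from the code, the values are placeholders.

    // C->WSS once the socket reports open (registerWithCollider):
    //   {"cmd": "register", "roomid": "<roomId>", "clientid": "<clientId>"}
    // C->WSS while registered (sendMessage:):
    //   {"cmd": "send", "msg": "<JSON-encoded ARDSignalingMessage>"}
    // WSS->C frames are JSON objects carrying either "msg" (a payload handed
    // to the delegate) or "error" (logged and dropped).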
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h
new file mode 100644
index 0000000000..5fd823f2de
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCIceCandidate.h"
+
+@interface RTC_OBJC_TYPE (RTCIceCandidate) (JSON)
+
++ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary:(NSDictionary *)dictionary;
++ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
+ (NSDictionary *)dictionary;
++ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
+ withType:(NSString *)typeValue;
+- (NSData *)JSONData;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m
new file mode 100644
index 0000000000..99cefbff0b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate+JSON.h"
+
+#import "sdk/objc/base/RTCLogging.h"
+
+static NSString * const kRTCICECandidateTypeKey = @"type";
+static NSString * const kRTCICECandidateTypeValue = @"candidate";
+static NSString * const kRTCICECandidateMidKey = @"id";
+static NSString * const kRTCICECandidateMLineIndexKey = @"label";
+static NSString * const kRTCICECandidateSdpKey = @"candidate";
+static NSString * const kRTCICECandidatesTypeKey = @"candidates";
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidate) (JSON)
+
++ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary:(NSDictionary *)dictionary {
+ NSString *mid = dictionary[kRTCICECandidateMidKey];
+ NSString *sdp = dictionary[kRTCICECandidateSdpKey];
+ NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
+ NSInteger mLineIndex = [num integerValue];
+ return [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
+ sdpMLineIndex:mLineIndex
+ sdpMid:mid];
+}
+
++ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
+ withType:(NSString *)typeValue {
+ NSMutableArray *jsonCandidates =
+ [NSMutableArray arrayWithCapacity:candidates.count];
+ for (RTC_OBJC_TYPE(RTCIceCandidate) * candidate in candidates) {
+ NSDictionary *jsonCandidate = [candidate JSONDictionary];
+ [jsonCandidates addObject:jsonCandidate];
+ }
+ NSDictionary *json = @{
+ kRTCICECandidateTypeKey : typeValue,
+ kRTCICECandidatesTypeKey : jsonCandidates
+ };
+ NSError *error = nil;
+ NSData *data =
+ [NSJSONSerialization dataWithJSONObject:json
+ options:NSJSONWritingPrettyPrinted
+ error:&error];
+ if (error) {
+ RTCLogError(@"Error serializing JSON: %@", error);
+ return nil;
+ }
+ return data;
+}
+
++ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
+ (NSDictionary *)dictionary {
+ NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey];
+ NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
+ [NSMutableArray arrayWithCapacity:jsonCandidates.count];
+ for (NSDictionary *jsonCandidate in jsonCandidates) {
+ RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+ [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
+ [candidates addObject:candidate];
+ }
+ return candidates;
+}
+
+- (NSData *)JSONData {
+ NSDictionary *json = @{
+ kRTCICECandidateTypeKey : kRTCICECandidateTypeValue,
+ kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
+ kRTCICECandidateMidKey : self.sdpMid,
+ kRTCICECandidateSdpKey : self.sdp
+ };
+ NSError *error = nil;
+ NSData *data =
+ [NSJSONSerialization dataWithJSONObject:json
+ options:NSJSONWritingPrettyPrinted
+ error:&error];
+ if (error) {
+ RTCLogError(@"Error serializing JSON: %@", error);
+ return nil;
+ }
+ return data;
+}
+
+- (NSDictionary *)JSONDictionary {
+ NSDictionary *json = @{
+ kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
+ kRTCICECandidateMidKey : self.sdpMid,
+ kRTCICECandidateSdpKey : self.sdp
+ };
+ return json;
+}
+
+@end
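For illustration, the candidate JSON shape this category reads and writes, using the keys defined above; the values are invented examples, not taken from the patch.

    // {
    //   "type": "candidate",
    //   "label": 0,        // sdpMLineIndex
    //   "id": "audio",     // sdpMid
    //   "candidate": "candidate:1 1 UDP 2122260223 192.0.2.1 54321 typ host"
    // }
    // JSONDataForIceCandidates:withType: wraps an array of these objects under
    // "candidates", alongside a caller-supplied "type" value.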
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h
new file mode 100644
index 0000000000..35f6af7583
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCIceServer.h"
+
+@interface RTC_OBJC_TYPE (RTCIceServer) (JSON)
+
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary:(NSDictionary *)dictionary;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m
new file mode 100644
index 0000000000..b5272a2f64
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCIceServer+JSON.m
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer+JSON.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceServer) (JSON)
+
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary:(NSDictionary *)dictionary {
+ NSArray *turnUrls = dictionary[@"urls"];
+ NSString *username = dictionary[@"username"] ?: @"";
+ NSString *credential = dictionary[@"credential"] ?: @"";
+ return [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:turnUrls
+ username:username
+ credential:credential];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h
new file mode 100644
index 0000000000..74d03d11b9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/api/peerconnection/RTCSessionDescription.h"
+
+@interface RTC_OBJC_TYPE (RTCSessionDescription) (JSON)
+
++ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary:(NSDictionary *)dictionary;
+- (NSData *)JSONData;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m
new file mode 100644
index 0000000000..28268faa84
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription+JSON.h"
+
+static NSString * const kRTCSessionDescriptionTypeKey = @"type";
+static NSString * const kRTCSessionDescriptionSdpKey = @"sdp";
+
+@implementation RTC_OBJC_TYPE (RTCSessionDescription) (JSON)
+
++ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary:(NSDictionary *)dictionary {
+ NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
+ RTCSdpType type = [[self class] typeForString:typeString];
+ NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
+ return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
+}
+
+- (NSData *)JSONData {
+ NSString *type = [[self class] stringForType:self.type];
+ NSDictionary *json = @{
+ kRTCSessionDescriptionTypeKey : type,
+ kRTCSessionDescriptionSdpKey : self.sdp
+ };
+ return [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
+}
+
+@end
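A minimal round-trip sketch of this category, assuming `sdpString` holds a valid SDP blob (a placeholder, not defined in the patch):

    // NSDictionary *json = @{ @"type" : @"offer", @"sdp" : sdpString };
    // RTC_OBJC_TYPE(RTCSessionDescription) *desc =
    //     [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:json];
    // NSData *wire = [desc JSONData];  // serializes back to {"type", "sdp"}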
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h
new file mode 100644
index 0000000000..5f0d7dbef7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+@interface NSDictionary (ARDUtilities)
+
+// Creates a dictionary with the keys and values in the JSON object.
++ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString;
++ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData;
+
+@end
+
+@interface NSURLConnection (ARDUtilities)
+
+// Issues an asynchronous request that calls back on the main queue.
++ (void)sendAsyncRequest:(NSURLRequest *)request
+ completionHandler:
+ (void (^)(NSURLResponse *response, NSData *data, NSError *error))completionHandler;
+
+// Posts data to the specified URL.
++ (void)sendAsyncPostToURL:(NSURL *)url
+ withData:(NSData *)data
+ completionHandler:(void (^)(BOOL succeeded, NSData *data))completionHandler;
+
+@end
+
+NSInteger ARDGetCpuUsagePercentage(void);
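As a usage sketch (the url, data, and handler body are hypothetical), callers elsewhere in AppRTCMobile invoke the POST helper roughly like this:

    // [NSURLConnection sendAsyncPostToURL:url
    //                            withData:data
    //                   completionHandler:^(BOOL succeeded, NSData *response) {
    //                     if (!succeeded) {
    //                       RTCLogError(@"POST failed.");
    //                     }
    //                   }];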
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m
new file mode 100644
index 0000000000..e0674f5210
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/common/ARDUtilities.m
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDUtilities.h"
+
+#import <mach/mach.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+
+@implementation NSDictionary (ARDUtilities)
+
++ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString {
+ NSParameterAssert(jsonString.length > 0);
+ NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding];
+ NSError *error = nil;
+ NSDictionary *dict =
+ [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
+ if (error) {
+ RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
+ }
+ return dict;
+}
+
++ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData {
+ NSError *error = nil;
+ NSDictionary *dict =
+ [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error];
+ if (error) {
+ RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
+ }
+ return dict;
+}
+
+@end
+
+@implementation NSURLConnection (ARDUtilities)
+
++ (void)sendAsyncRequest:(NSURLRequest *)request
+ completionHandler:(void (^)(NSURLResponse *response,
+ NSData *data,
+ NSError *error))completionHandler {
+  // Kick off an async request and invoke the completion handler on the main
+  // queue, as documented in the header. The shared NSURLSession calls its
+  // completion handler on a background delegate queue, so hop back.
+  NSURLSession *session = [NSURLSession sharedSession];
+  [[session dataTaskWithRequest:request
+      completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
+        dispatch_async(dispatch_get_main_queue(), ^{
+          if (completionHandler) {
+            completionHandler(response, data, error);
+          }
+        });
+      }] resume];
+}
+
+// Posts data to the specified URL.
++ (void)sendAsyncPostToURL:(NSURL *)url
+ withData:(NSData *)data
+ completionHandler:(void (^)(BOOL succeeded,
+ NSData *data))completionHandler {
+ NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+ request.HTTPMethod = @"POST";
+ request.HTTPBody = data;
+ [[self class] sendAsyncRequest:request
+ completionHandler:^(NSURLResponse *response,
+ NSData *data,
+ NSError *error) {
+ if (error) {
+ RTCLogError(@"Error posting data: %@", error.localizedDescription);
+ if (completionHandler) {
+ completionHandler(NO, data);
+ }
+ return;
+ }
+ NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
+ if (httpResponse.statusCode != 200) {
+ NSString *serverResponse = data.length > 0 ?
+ [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] :
+ nil;
+ RTCLogError(@"Received bad response: %@", serverResponse);
+ if (completionHandler) {
+ completionHandler(NO, data);
+ }
+ return;
+ }
+ if (completionHandler) {
+ completionHandler(YES, data);
+ }
+ }];
+}
+
+@end
+
+NSInteger ARDGetCpuUsagePercentage(void) {
+ // Create an array of thread ports for the current task.
+ const task_t task = mach_task_self();
+ thread_act_array_t thread_array;
+ mach_msg_type_number_t thread_count;
+ if (task_threads(task, &thread_array, &thread_count) != KERN_SUCCESS) {
+ return -1;
+ }
+
+ // Sum cpu usage from all threads.
+ float cpu_usage_percentage = 0;
+ thread_basic_info_data_t thread_info_data = {};
+ mach_msg_type_number_t thread_info_count;
+ for (size_t i = 0; i < thread_count; ++i) {
+ thread_info_count = THREAD_BASIC_INFO_COUNT;
+ kern_return_t ret = thread_info(thread_array[i],
+ THREAD_BASIC_INFO,
+ (thread_info_t)&thread_info_data,
+ &thread_info_count);
+ if (ret == KERN_SUCCESS) {
+ cpu_usage_percentage +=
+ 100.f * (float)thread_info_data.cpu_usage / TH_USAGE_SCALE;
+ }
+ }
+
+ // Dealloc the created array.
+ vm_deallocate(task, (vm_address_t)thread_array,
+ sizeof(thread_act_t) * thread_count);
+ return lroundf(cpu_usage_percentage);
+}
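A worked reading of the loop above: each thread's cpu_usage is a fixed-point fraction of one core scaled by TH_USAGE_SCALE, so a thread reporting TH_USAGE_SCALE/2 contributes 50 to the sum; with several busy threads on a multi-core device the rounded result can therefore exceed 100.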
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h
new file mode 100644
index 0000000000..7eafff8ebc
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+// The main application class of the AppRTCMobile iOS app, demonstrating
+// interoperability between the Objective-C implementation of PeerConnection
+// and the appr.tc demo webapp.
+@interface ARDAppDelegate : NSObject <UIApplicationDelegate>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m
new file mode 100644
index 0000000000..51e9910b87
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDAppDelegate.h"
+
+#import "sdk/objc/api/peerconnection/RTCFieldTrials.h"
+#import "sdk/objc/api/peerconnection/RTCSSLAdapter.h"
+#import "sdk/objc/api/peerconnection/RTCTracing.h"
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDMainViewController.h"
+
+@implementation ARDAppDelegate {
+ UIWindow *_window;
+}
+
+#pragma mark - UIApplicationDelegate methods
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ NSDictionary *fieldTrials = @{};
+ RTCInitFieldTrialDictionary(fieldTrials);
+ RTCInitializeSSL();
+ RTCSetupInternalTracer();
+ _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ [_window makeKeyAndVisible];
+ ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
+
+ UINavigationController *root =
+ [[UINavigationController alloc] initWithRootViewController:viewController];
+ root.navigationBar.translucent = NO;
+ _window.rootViewController = root;
+
+#if defined(NDEBUG)
+ // In debug builds the default level is LS_INFO and in non-debug builds it is
+ // disabled. Continue to log to console in non-debug builds, but only
+ // warnings and errors.
+ RTCSetMinDebugLogLevel(RTCLoggingSeverityWarning);
+#endif
+
+ return YES;
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+ RTCShutdownInternalTracer();
+ RTCCleanupSSL();
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h
new file mode 100644
index 0000000000..82f8fcdd1b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
+
+/**
+ * Controls a file capturer.
+ */
+NS_CLASS_AVAILABLE_IOS(10)
+@interface ARDFileCaptureController : NSObject
+
+/**
+ * Creates an instance of the controller.
+ *
+ * @param capturer The capturer to be controlled.
+ */
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;
+
+/**
+ * Starts the file capturer.
+ *
+ * Possible errors produced by the capturer will be logged.
+ */
+- (void)startCapture;
+
+/**
+ * Immediately stops the capturer.
+ */
+- (void)stopCapture;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m
new file mode 100644
index 0000000000..2ddde6dd59
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDFileCaptureController.h"
+
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+
+@interface ARDFileCaptureController ()
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * fileCapturer;
+
+@end
+
+@implementation ARDFileCaptureController
+@synthesize fileCapturer = _fileCapturer;
+
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
+ if (self = [super init]) {
+ _fileCapturer = capturer;
+ }
+ return self;
+}
+
+- (void)startCapture {
+ [self startFileCapture];
+}
+
+- (void)startFileCapture {
+ [self.fileCapturer startCapturingFromFileNamed:@"foreman.mp4"
+ onError:^(NSError *_Nonnull error) {
+ NSLog(@"Error %@", error.userInfo);
+ }];
+}
+
+- (void)stopCapture {
+ [self.fileCapturer stopCapture];
+}
+@end
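A hypothetical call site (the capturer instance would come from the app's capturer setup, which is outside this file):

    // ARDFileCaptureController *fileController =
    //     [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
    // [fileController startCapture];
    // ... later, when the call ends:
    // [fileController stopCapture];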
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h
new file mode 100644
index 0000000000..c6691c2d84
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDMainView;
+
+@protocol ARDMainViewDelegate <NSObject>
+
+- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback;
+- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView;
+
+@end
+
+// The main view of AppRTCMobile. It contains an input field for entering the
+// name of the apprtc room to connect to.
+@interface ARDMainView : UIView
+
+@property(nonatomic, weak) id<ARDMainViewDelegate> delegate;
+// Updates the audio loop button as needed.
+@property(nonatomic, assign) BOOL isAudioLoopPlaying;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m
new file mode 100644
index 0000000000..d9521060eb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainView.m
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMainView.h"
+
+#import "UIImage+ARDUtilities.h"
+
+static CGFloat const kRoomTextFieldHeight = 40;
+static CGFloat const kRoomTextFieldMargin = 8;
+static CGFloat const kCallControlMargin = 8;
+
+// Helper view that contains a text field and a clear button.
+@interface ARDRoomTextField : UIView <UITextFieldDelegate>
+@property(nonatomic, readonly) NSString *roomText;
+@end
+
+@implementation ARDRoomTextField {
+ UITextField *_roomText;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
+ _roomText.borderStyle = UITextBorderStyleNone;
+ _roomText.font = [UIFont systemFontOfSize:12];
+ _roomText.placeholder = @"Room name";
+ _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+ _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
+ _roomText.clearButtonMode = UITextFieldViewModeAlways;
+ _roomText.delegate = self;
+ [self addSubview:_roomText];
+
+ // Give rounded corners and a light gray border.
+ self.layer.borderWidth = 1;
+ self.layer.borderColor = [[UIColor lightGrayColor] CGColor];
+ self.layer.cornerRadius = 2;
+ }
+ return self;
+}
+
+- (void)layoutSubviews {
+ _roomText.frame =
+ CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
+ kRoomTextFieldHeight);
+}
+
+- (CGSize)sizeThatFits:(CGSize)size {
+ size.height = kRoomTextFieldHeight;
+ return size;
+}
+
+- (NSString *)roomText {
+ return _roomText.text;
+}
+
+#pragma mark - UITextFieldDelegate
+
+- (BOOL)textFieldShouldReturn:(UITextField *)textField {
+ // There is no other control that can take focus, so manually resign focus
+ // when return (Join) is pressed to trigger `textFieldDidEndEditing`.
+ [textField resignFirstResponder];
+ return YES;
+}
+
+@end
+
+@implementation ARDMainView {
+ ARDRoomTextField *_roomText;
+ UIButton *_startRegularCallButton;
+ UIButton *_startLoopbackCallButton;
+ UIButton *_audioLoopButton;
+}
+
+@synthesize delegate = _delegate;
+@synthesize isAudioLoopPlaying = _isAudioLoopPlaying;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
+ [self addSubview:_roomText];
+
+ UIFont *controlFont = [UIFont boldSystemFontOfSize:18.0];
+ UIColor *controlFontColor = [UIColor whiteColor];
+
+ _startRegularCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _startRegularCallButton.titleLabel.font = controlFont;
+ [_startRegularCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+ _startRegularCallButton.backgroundColor
+ = [UIColor colorWithRed:66.0/255.0 green:200.0/255.0 blue:90.0/255.0 alpha:1.0];
+ [_startRegularCallButton setTitle:@"Call room" forState:UIControlStateNormal];
+ [_startRegularCallButton addTarget:self
+ action:@selector(onStartRegularCall:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_startRegularCallButton];
+
+ _startLoopbackCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _startLoopbackCallButton.titleLabel.font = controlFont;
+ [_startLoopbackCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+ _startLoopbackCallButton.backgroundColor =
+ [UIColor colorWithRed:0.0 green:122.0/255.0 blue:1.0 alpha:1.0];
+ [_startLoopbackCallButton setTitle:@"Loopback call" forState:UIControlStateNormal];
+ [_startLoopbackCallButton addTarget:self
+ action:@selector(onStartLoopbackCall:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_startLoopbackCallButton];
+
+
+ // Used to test what happens to sounds when calls are in progress.
+ _audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _audioLoopButton.titleLabel.font = controlFont;
+ [_audioLoopButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+ _audioLoopButton.backgroundColor =
+ [UIColor colorWithRed:1.0 green:149.0/255.0 blue:0.0 alpha:1.0];
+ [self updateAudioLoopButton];
+ [_audioLoopButton addTarget:self
+ action:@selector(onToggleAudioLoop:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_audioLoopButton];
+
+ self.backgroundColor = [UIColor whiteColor];
+ }
+ return self;
+}
+
+- (void)setIsAudioLoopPlaying:(BOOL)isAudioLoopPlaying {
+ if (_isAudioLoopPlaying == isAudioLoopPlaying) {
+ return;
+ }
+ _isAudioLoopPlaying = isAudioLoopPlaying;
+ [self updateAudioLoopButton];
+}
+
+- (void)layoutSubviews {
+ CGRect bounds = self.bounds;
+ CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin;
+ CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height;
+ _roomText.frame =
+ CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth,
+ roomTextHeight);
+
+ CGFloat buttonHeight =
+ (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - kCallControlMargin * 4) / 3;
+
+ CGFloat regularCallFrameTop = CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
+ CGRect regularCallFrame = CGRectMake(kCallControlMargin,
+ regularCallFrameTop,
+ bounds.size.width - 2*kCallControlMargin,
+ buttonHeight);
+
+ CGFloat loopbackCallFrameTop = CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
+ CGRect loopbackCallFrame = CGRectMake(kCallControlMargin,
+ loopbackCallFrameTop,
+ bounds.size.width - 2*kCallControlMargin,
+ buttonHeight);
+
+ CGFloat audioLoopTop = CGRectGetMaxY(loopbackCallFrame) + kCallControlMargin;
+ CGRect audioLoopFrame = CGRectMake(kCallControlMargin,
+ audioLoopTop,
+ bounds.size.width - 2*kCallControlMargin,
+ buttonHeight);
+
+ _startRegularCallButton.frame = regularCallFrame;
+ _startLoopbackCallButton.frame = loopbackCallFrame;
+ _audioLoopButton.frame = audioLoopFrame;
+}
+
+#pragma mark - Private
+
+- (void)updateAudioLoopButton {
+ if (_isAudioLoopPlaying) {
+ [_audioLoopButton setTitle:@"Stop sound" forState:UIControlStateNormal];
+ } else {
+ [_audioLoopButton setTitle:@"Play sound" forState:UIControlStateNormal];
+ }
+}
+
+- (void)onToggleAudioLoop:(id)sender {
+ [_delegate mainViewDidToggleAudioLoop:self];
+}
+
+- (void)onStartRegularCall:(id)sender {
+ [_delegate mainView:self didInputRoom:_roomText.roomText isLoopback:NO];
+}
+
+- (void)onStartLoopbackCall:(id)sender {
+ [_delegate mainView:self didInputRoom:_roomText.roomText isLoopback:YES];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h
new file mode 100644
index 0000000000..e5c92dd304
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface ARDMainViewController : UIViewController
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
new file mode 100644
index 0000000000..e8b8112e41
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDMainViewController.m
@@ -0,0 +1,263 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDMainViewController.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSession.h"
+#import "sdk/objc/components/audio/RTCAudioSessionConfiguration.h"
+#import "sdk/objc/helpers/RTCDispatcher.h"
+
+#import "ARDAppClient.h"
+#import "ARDMainView.h"
+#import "ARDSettingsModel.h"
+#import "ARDSettingsViewController.h"
+#import "ARDVideoCallViewController.h"
+
+static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
+
+// Launch argument indicating that the app should start a loopback call immediately.
+static NSString *const loopbackLaunchProcessArgument = @"loopback";
+
+@interface ARDMainViewController () <ARDMainViewDelegate,
+ ARDVideoCallViewControllerDelegate,
+ RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+@property(nonatomic, strong) ARDMainView *mainView;
+@property(nonatomic, strong) AVAudioPlayer *audioPlayer;
+@end
+
+@implementation ARDMainViewController {
+ BOOL _useManualAudio;
+}
+
+@synthesize mainView = _mainView;
+@synthesize audioPlayer = _audioPlayer;
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
+ [self mainView:nil didInputRoom:@"" isLoopback:YES];
+ }
+}
+
+- (void)loadView {
+ self.title = @"AppRTC Mobile";
+ _mainView = [[ARDMainView alloc] initWithFrame:CGRectZero];
+ _mainView.delegate = self;
+ self.view = _mainView;
+ [self addSettingsBarButton];
+
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
+ AVAudioSessionCategoryOptionDefaultToSpeaker;
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];
+
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session addDelegate:self];
+
+ [self configureAudioSession];
+ [self setupAudioPlayer];
+}
+
+- (void)addSettingsBarButton {
+ UIBarButtonItem *settingsButton =
+ [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:barButtonImageString]
+ style:UIBarButtonItemStylePlain
+ target:self
+ action:@selector(showSettings:)];
+ self.navigationItem.rightBarButtonItem = settingsButton;
+}
+
++ (NSString *)loopbackRoomString {
+ NSString *loopbackRoomString =
+ [[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+ return loopbackRoomString;
+}
+
+#pragma mark - ARDMainViewDelegate
+
+- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback {
+ if (!room.length) {
+ if (isLoopback) {
+ // If this is a loopback call, allow a generated room name.
+ room = [[self class] loopbackRoomString];
+ } else {
+ [self showAlertWithMessage:@"Missing room name."];
+ return;
+ }
+ }
+ // Trim whitespaces.
+ NSCharacterSet *whitespaceSet = [NSCharacterSet whitespaceCharacterSet];
+ NSString *trimmedRoom = [room stringByTrimmingCharactersInSet:whitespaceSet];
+
+ // Check that room name is valid.
+ NSError *error = nil;
+ NSRegularExpressionOptions options = NSRegularExpressionCaseInsensitive;
+ NSRegularExpression *regex =
+ [NSRegularExpression regularExpressionWithPattern:@"\\w+"
+ options:options
+ error:&error];
+ if (error) {
+ [self showAlertWithMessage:error.localizedDescription];
+ return;
+ }
+ NSRange matchRange =
+ [regex rangeOfFirstMatchInString:trimmedRoom
+ options:0
+ range:NSMakeRange(0, trimmedRoom.length)];
+ if (matchRange.location == NSNotFound ||
+ matchRange.length != trimmedRoom.length) {
+ [self showAlertWithMessage:@"Invalid room name."];
+ return;
+ }
+
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
+ session.isAudioEnabled = NO;
+
+ // Kick off the video call.
+ ARDVideoCallViewController *videoCallViewController =
+ [[ARDVideoCallViewController alloc] initForRoom:trimmedRoom
+ isLoopback:isLoopback
+ delegate:self];
+ videoCallViewController.modalTransitionStyle =
+ UIModalTransitionStyleCrossDissolve;
+ videoCallViewController.modalPresentationStyle = UIModalPresentationFullScreen;
+ [self presentViewController:videoCallViewController
+ animated:YES
+ completion:nil];
+}
+
+- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView {
+ if (mainView.isAudioLoopPlaying) {
+ [_audioPlayer stop];
+ } else {
+ [_audioPlayer play];
+ }
+ mainView.isAudioLoopPlaying = _audioPlayer.playing;
+}
+
+#pragma mark - ARDVideoCallViewControllerDelegate
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {
+ if (![viewController isBeingDismissed]) {
+ RTCLog(@"Dismissing VC");
+ [self dismissViewControllerAnimated:YES completion:^{
+ [self restartAudioPlayerIfNeeded];
+ }];
+ }
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ session.isAudioEnabled = NO;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+ // Stop playback on main queue and then configure WebRTC.
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeMain
+ block:^{
+ if (self.mainView.isAudioLoopPlaying) {
+ RTCLog(@"Stopping audio loop due to WebRTC start.");
+ [self.audioPlayer stop];
+ }
+ RTCLog(@"Setting isAudioEnabled to YES.");
+ session.isAudioEnabled = YES;
+ }];
+}
+
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+ // WebRTC is done with the audio session. Restart playback.
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
+ block:^{
+ RTCLog(@"audioSessionDidStopPlayOrRecord");
+ [self restartAudioPlayerIfNeeded];
+ }];
+}
+
+#pragma mark - Private
+- (void)showSettings:(id)sender {
+ ARDSettingsViewController *settingsController =
+ [[ARDSettingsViewController alloc] initWithStyle:UITableViewStyleGrouped
+ settingsModel:[[ARDSettingsModel alloc] init]];
+
+ UINavigationController *navigationController =
+ [[UINavigationController alloc] initWithRootViewController:settingsController];
+ [self presentViewControllerAsModal:navigationController];
+}
+
+- (void)presentViewControllerAsModal:(UIViewController *)viewController {
+ [self presentViewController:viewController animated:YES completion:nil];
+}
+
+- (void)configureAudioSession {
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *configuration =
+ [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
+ configuration.category = AVAudioSessionCategoryAmbient;
+ configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
+ configuration.mode = AVAudioSessionModeDefault;
+
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ BOOL hasSucceeded = NO;
+ NSError *error = nil;
+ if (session.isActive) {
+ hasSucceeded = [session setConfiguration:configuration error:&error];
+ } else {
+ hasSucceeded = [session setConfiguration:configuration
+ active:YES
+ error:&error];
+ }
+ if (!hasSucceeded) {
+ RTCLogError(@"Error setting configuration: %@", error.localizedDescription);
+ }
+ [session unlockForConfiguration];
+}
+
+- (void)setupAudioPlayer {
+ NSString *audioFilePath =
+ [[NSBundle mainBundle] pathForResource:@"mozart" ofType:@"mp3"];
+  NSURL *audioFileURL = [NSURL fileURLWithPath:audioFilePath];
+ _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL
+ error:nil];
+ _audioPlayer.numberOfLoops = -1;
+ _audioPlayer.volume = 1.0;
+ [_audioPlayer prepareToPlay];
+}
+
+- (void)restartAudioPlayerIfNeeded {
+ [self configureAudioSession];
+ if (_mainView.isAudioLoopPlaying && !self.presentedViewController) {
+ RTCLog(@"Starting audio loop due to WebRTC end.");
+ [_audioPlayer play];
+ }
+}
+
+- (void)showAlertWithMessage:(NSString*)message {
+ UIAlertController *alert =
+ [UIAlertController alertControllerWithTitle:nil
+ message:message
+ preferredStyle:UIAlertControllerStyleAlert];
+
+ UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+ style:UIAlertActionStyleDefault
+ handler:^(UIAlertAction *action){
+ }];
+
+ [alert addAction:defaultAction];
+ [self presentViewController:alert animated:YES completion:nil];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h
new file mode 100644
index 0000000000..759af5416f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDSettingsModel;
+
+NS_ASSUME_NONNULL_BEGIN
+/**
+ * Displays settings options.
+ */
+@interface ARDSettingsViewController : UITableViewController
+
+/**
+ * Creates new instance.
+ *
+ * @param style the table view style that should be used
+ * @param settingsModel model class for the user settings.
+ */
+- (instancetype)initWithStyle:(UITableViewStyle)style
+ settingsModel:(ARDSettingsModel *)settingsModel;
+
+#pragma mark - Unavailable
+
+- (instancetype)initWithStyle:(UITableViewStyle)style NS_UNAVAILABLE;
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m
new file mode 100644
index 0000000000..9bcbd3aa5c
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m
@@ -0,0 +1,361 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDSettingsViewController.h"
+#import "ARDSettingsModel.h"
+#import "RTCVideoCodecInfo+HumanReadable.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(int, ARDSettingsSections) {
+ ARDSettingsSectionAudioSettings = 0,
+ ARDSettingsSectionVideoResolution,
+ ARDSettingsSectionVideoCodec,
+ ARDSettingsSectionBitRate,
+};
+
+typedef NS_ENUM(int, ARDAudioSettingsOptions) {
+ ARDAudioSettingsAudioOnly = 0,
+ ARDAudioSettingsCreateAecDump,
+ ARDAudioSettingsUseManualAudioConfig,
+};
+
+@interface ARDSettingsViewController () <UITextFieldDelegate> {
+ ARDSettingsModel *_settingsModel;
+}
+
+@end
+
+@implementation ARDSettingsViewController
+
+- (instancetype)initWithStyle:(UITableViewStyle)style
+ settingsModel:(ARDSettingsModel *)settingsModel {
+ self = [super initWithStyle:style];
+ if (self) {
+ _settingsModel = settingsModel;
+ }
+ return self;
+}
+
+#pragma mark - View lifecycle
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ self.title = @"Settings";
+ [self addDoneBarButton];
+}
+
+- (void)viewWillAppear:(BOOL)animated {
+ [super viewWillAppear:animated];
+}
+
+#pragma mark - Data source
+
+- (NSArray<NSString *> *)videoResolutionArray {
+ return [_settingsModel availableVideoResolutions];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)videoCodecArray {
+ return [_settingsModel availableVideoCodecs];
+}
+
+#pragma mark -
+
+- (void)addDoneBarButton {
+ UIBarButtonItem *barItem =
+ [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
+ target:self
+ action:@selector(dismissModally:)];
+ self.navigationItem.leftBarButtonItem = barItem;
+}
+
+#pragma mark - Dismissal of view controller
+
+- (void)dismissModally:(id)sender {
+ [self dismissViewControllerAnimated:YES completion:nil];
+}
+
+#pragma mark - Table view data source
+
+- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
+ return 4;
+}
+
+- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+ switch (section) {
+ case ARDSettingsSectionAudioSettings:
+ return 3;
+ case ARDSettingsSectionVideoResolution:
+ return self.videoResolutionArray.count;
+ case ARDSettingsSectionVideoCodec:
+ return self.videoCodecArray.count;
+ default:
+ return 1;
+ }
+}
+
+#pragma mark - Table view delegate helpers
+
+- (void)removeAllAccessories:(UITableView *)tableView
+ inSection:(int)section
+{
+ for (int i = 0; i < [tableView numberOfRowsInSection:section]; i++) {
+ NSIndexPath *rowPath = [NSIndexPath indexPathForRow:i inSection:section];
+ UITableViewCell *cell = [tableView cellForRowAtIndexPath:rowPath];
+ cell.accessoryType = UITableViewCellAccessoryNone;
+ }
+}
+
+- (void)tableView:(UITableView *)tableView
+updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
+ inSection:(int)section {
+ [self removeAllAccessories:tableView inSection:section];
+ UITableViewCell *cell = [tableView cellForRowAtIndexPath:indexPath];
+ cell.accessoryType = UITableViewCellAccessoryCheckmark;
+ [tableView deselectRowAtIndexPath:indexPath animated:YES];
+}
+
+#pragma mark - Table view delegate
+
+- (nullable NSString *)tableView:(UITableView *)tableView
+ titleForHeaderInSection:(NSInteger)section {
+ switch (section) {
+ case ARDSettingsSectionAudioSettings:
+ return @"Audio";
+ case ARDSettingsSectionVideoResolution:
+ return @"Video resolution";
+ case ARDSettingsSectionVideoCodec:
+ return @"Video codec";
+ case ARDSettingsSectionBitRate:
+ return @"Maximum bitrate";
+ default:
+ return @"";
+ }
+}
+
+- (UITableViewCell *)tableView:(UITableView *)tableView
+ cellForRowAtIndexPath:(NSIndexPath *)indexPath {
+ switch (indexPath.section) {
+ case ARDSettingsSectionAudioSettings:
+ return [self audioSettingsTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+ case ARDSettingsSectionVideoResolution:
+ return [self videoResolutionTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+ case ARDSettingsSectionVideoCodec:
+ return [self videoCodecTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+ case ARDSettingsSectionBitRate:
+ return [self bitrateTableViewCellForTableView:tableView atIndexPath:indexPath];
+
+ default:
+ return [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+ reuseIdentifier:@"identifier"];
+ }
+}
+
+- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
+ switch (indexPath.section) {
+ case ARDSettingsSectionVideoResolution:
+      [self tableView:tableView didSelectVideoResolutionCellAtIndexPath:indexPath];
+ break;
+
+ case ARDSettingsSectionVideoCodec:
+ [self tableView:tableView didSelectVideoCodecCellAtIndexPath:indexPath];
+ break;
+ }
+}
+
+#pragma mark - Table view delegate(Video Resolution)
+
+- (UITableViewCell *)videoResolutionTableViewCellForTableView:(UITableView *)tableView
+ atIndexPath:(NSIndexPath *)indexPath {
+ NSString *dequeueIdentifier = @"ARDSettingsVideoResolutionViewCellIdentifier";
+ UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+ if (!cell) {
+ cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+ reuseIdentifier:dequeueIdentifier];
+ }
+ NSString *resolution = self.videoResolutionArray[indexPath.row];
+ cell.textLabel.text = resolution;
+ if ([resolution isEqualToString:[_settingsModel currentVideoResolutionSettingFromStore]]) {
+ cell.accessoryType = UITableViewCellAccessoryCheckmark;
+ } else {
+ cell.accessoryType = UITableViewCellAccessoryNone;
+ }
+
+ return cell;
+}
+
+- (void)tableView:(UITableView *)tableView
+    didSelectVideoResolutionCellAtIndexPath:(NSIndexPath *)indexPath {
+ [self tableView:tableView
+ updateListSelectionAtIndexPath:indexPath
+ inSection:ARDSettingsSectionVideoResolution];
+
+ NSString *videoResolution = self.videoResolutionArray[indexPath.row];
+ [_settingsModel storeVideoResolutionSetting:videoResolution];
+}
+
+#pragma mark - Table view delegate(Video Codec)
+
+- (UITableViewCell *)videoCodecTableViewCellForTableView:(UITableView *)tableView
+ atIndexPath:(NSIndexPath *)indexPath {
+ NSString *dequeueIdentifier = @"ARDSettingsVideoCodecCellIdentifier";
+ UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+ if (!cell) {
+ cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+ reuseIdentifier:dequeueIdentifier];
+ }
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
+ cell.textLabel.text = [codec humanReadableDescription];
+ if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
+ cell.accessoryType = UITableViewCellAccessoryCheckmark;
+ } else {
+ cell.accessoryType = UITableViewCellAccessoryNone;
+ }
+
+ return cell;
+}
+
+- (void)tableView:(UITableView *)tableView
+ didSelectVideoCodecCellAtIndexPath:(NSIndexPath *)indexPath {
+ [self tableView:tableView
+ updateListSelectionAtIndexPath:indexPath
+ inSection:ARDSettingsSectionVideoCodec];
+
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
+ [_settingsModel storeVideoCodecSetting:videoCodec];
+}
+
+#pragma mark - Table view delegate(Bitrate)
+
+- (UITableViewCell *)bitrateTableViewCellForTableView:(UITableView *)tableView
+ atIndexPath:(NSIndexPath *)indexPath {
+ NSString *dequeueIdentifier = @"ARDSettingsBitrateCellIdentifier";
+ UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+ if (!cell) {
+ cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+ reuseIdentifier:dequeueIdentifier];
+
+ UITextField *textField = [[UITextField alloc]
+ initWithFrame:CGRectMake(10, 0, cell.bounds.size.width - 20, cell.bounds.size.height)];
+ NSString *currentMaxBitrate = [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
+ textField.text = currentMaxBitrate;
+ textField.placeholder = @"Enter max bit rate (kbps)";
+ textField.keyboardType = UIKeyboardTypeNumberPad;
+ textField.delegate = self;
+
+      // Numerical keyboards have no return button, so we need to add one manually.
+ UIToolbar *numberToolbar =
+ [[UIToolbar alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
+ numberToolbar.items = @[
+ [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
+ target:nil
+ action:nil],
+ [[UIBarButtonItem alloc] initWithTitle:@"Apply"
+ style:UIBarButtonItemStyleDone
+ target:self
+ action:@selector(numberTextFieldDidEndEditing:)]
+ ];
+ [numberToolbar sizeToFit];
+
+ textField.inputAccessoryView = numberToolbar;
+ [cell addSubview:textField];
+ }
+ return cell;
+}
+
+- (void)numberTextFieldDidEndEditing:(id)sender {
+ [self.view endEditing:YES];
+}
+
+- (void)textFieldDidEndEditing:(UITextField *)textField {
+ NSNumber *bitrateNumber = nil;
+
+ if (textField.text.length != 0) {
+ bitrateNumber = [NSNumber numberWithInteger:textField.text.intValue];
+ }
+
+ [_settingsModel storeMaxBitrateSetting:bitrateNumber];
+}
+
+#pragma mark - Table view delegate(Audio settings)
+
+- (UITableViewCell *)audioSettingsTableViewCellForTableView:(UITableView *)tableView
+ atIndexPath:(NSIndexPath *)indexPath {
+ NSString *dequeueIdentifier = @"ARDSettingsAudioSettingsCellIdentifier";
+ UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+ if (!cell) {
+ cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+ reuseIdentifier:dequeueIdentifier];
+ cell.selectionStyle = UITableViewCellSelectionStyleNone;
+ UISwitch *switchView = [[UISwitch alloc] initWithFrame:CGRectZero];
+ switchView.tag = indexPath.row;
+ [switchView addTarget:self
+ action:@selector(audioSettingSwitchChanged:)
+ forControlEvents:UIControlEventValueChanged];
+ cell.accessoryView = switchView;
+ }
+
+ cell.textLabel.text = [self labelForAudioSettingAtIndexPathRow:indexPath.row];
+ UISwitch *switchView = (UISwitch *)cell.accessoryView;
+ switchView.on = [self valueForAudioSettingAtIndexPathRow:indexPath.row];
+
+ return cell;
+}
+
+- (NSString *)labelForAudioSettingAtIndexPathRow:(NSInteger)setting {
+ switch (setting) {
+ case ARDAudioSettingsAudioOnly:
+ return @"Audio only";
+ case ARDAudioSettingsCreateAecDump:
+ return @"Create AecDump";
+ case ARDAudioSettingsUseManualAudioConfig:
+ return @"Use manual audio config";
+ default:
+ return @"";
+ }
+}
+
+- (BOOL)valueForAudioSettingAtIndexPathRow:(NSInteger)setting {
+ switch (setting) {
+ case ARDAudioSettingsAudioOnly:
+ return [_settingsModel currentAudioOnlySettingFromStore];
+ case ARDAudioSettingsCreateAecDump:
+ return [_settingsModel currentCreateAecDumpSettingFromStore];
+ case ARDAudioSettingsUseManualAudioConfig:
+ return [_settingsModel currentUseManualAudioConfigSettingFromStore];
+ default:
+ return NO;
+ }
+}
+
+- (void)audioSettingSwitchChanged:(UISwitch *)sender {
+ switch (sender.tag) {
+ case ARDAudioSettingsAudioOnly: {
+ [_settingsModel storeAudioOnlySetting:sender.isOn];
+ break;
+ }
+ case ARDAudioSettingsCreateAecDump: {
+ [_settingsModel storeCreateAecDumpSetting:sender.isOn];
+ break;
+ }
+ case ARDAudioSettingsUseManualAudioConfig: {
+ [_settingsModel storeUseManualAudioConfigSetting:sender.isOn];
+ break;
+ }
+ default:
+ break;
+ }
+}
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h
new file mode 100644
index 0000000000..72207de64e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
+
+@interface ARDStatsView : UIView
+
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m
new file mode 100644
index 0000000000..867ba5b09e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDStatsView.m
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDStatsView.h"
+
+#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
+
+#import "ARDStatsBuilder.h"
+
+@implementation ARDStatsView {
+ UILabel *_statsLabel;
+ ARDStatsBuilder *_statsBuilder;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ _statsLabel = [[UILabel alloc] initWithFrame:CGRectZero];
+ _statsLabel.numberOfLines = 0;
+ _statsLabel.font = [UIFont fontWithName:@"Roboto" size:12];
+ _statsLabel.adjustsFontSizeToFitWidth = YES;
+ _statsLabel.minimumScaleFactor = 0.6;
+ _statsLabel.textColor = [UIColor greenColor];
+ [self addSubview:_statsLabel];
+ self.backgroundColor = [UIColor colorWithWhite:0 alpha:.6];
+ _statsBuilder = [[ARDStatsBuilder alloc] init];
+ }
+ return self;
+}
+
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+ _statsBuilder.stats = stats;
+ _statsLabel.text = _statsBuilder.statsString;
+}
+
+- (void)layoutSubviews {
+ _statsLabel.frame = self.bounds;
+}
+
+- (CGSize)sizeThatFits:(CGSize)size {
+ return [_statsLabel sizeThatFits:size];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h
new file mode 100644
index 0000000000..a31c7fe742
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#import "ARDStatsView.h"
+
+@class ARDVideoCallView;
+@protocol ARDVideoCallViewDelegate <NSObject>
+
+// Called when the camera switch button is pressed.
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldSwitchCameraWithCompletion:(void (^)(NSError *))completion;
+
+// Called when the route change button is pressed.
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldChangeRouteWithCompletion:(void (^)(void))completion;
+
+// Called when the hangup button is pressed.
+- (void)videoCallViewDidHangup:(ARDVideoCallView *)view;
+
+// Called when stats are enabled by triple tapping.
+- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view;
+
+@end
+
+// Video call view that shows local and remote video, a label for displaying
+// status, and a hangup button.
+@interface ARDVideoCallView : UIView
+
+@property(nonatomic, readonly) UILabel *statusLabel;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic, readonly) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic, readonly) ARDStatsView *statsView;
+@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
new file mode 100644
index 0000000000..437aea8d56
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDVideoCallView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/components/renderer/metal/RTCMTLVideoView.h"
+
+#import "UIImage+ARDUtilities.h"
+
+static CGFloat const kButtonPadding = 16;
+static CGFloat const kButtonSize = 48;
+static CGFloat const kLocalVideoViewSize = 120;
+static CGFloat const kLocalVideoViewPadding = 8;
+static CGFloat const kStatusBarHeight = 20;
+
+@interface ARDVideoCallView () <RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@end
+
+@implementation ARDVideoCallView {
+ UIButton *_routeChangeButton;
+ UIButton *_cameraSwitchButton;
+ UIButton *_hangupButton;
+ CGSize _remoteVideoSize;
+}
+
+@synthesize statusLabel = _statusLabel;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize statsView = _statsView;
+@synthesize delegate = _delegate;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+
+ [self addSubview:_remoteVideoView];
+
+ _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+ [self addSubview:_localVideoView];
+
+ _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
+ _statsView.hidden = YES;
+ [self addSubview:_statsView];
+
+ _routeChangeButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _routeChangeButton.backgroundColor = [UIColor grayColor];
+ _routeChangeButton.layer.cornerRadius = kButtonSize / 2;
+ _routeChangeButton.layer.masksToBounds = YES;
+ UIImage *image = [UIImage imageForName:@"ic_surround_sound_black_24dp.png"
+ color:[UIColor whiteColor]];
+ [_routeChangeButton setImage:image forState:UIControlStateNormal];
+ [_routeChangeButton addTarget:self
+ action:@selector(onRouteChange:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_routeChangeButton];
+
+ // TODO(tkchin): don't display this if we can't actually do camera switch.
+ _cameraSwitchButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _cameraSwitchButton.backgroundColor = [UIColor grayColor];
+ _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
+ _cameraSwitchButton.layer.masksToBounds = YES;
+ image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" color:[UIColor whiteColor]];
+ [_cameraSwitchButton setImage:image forState:UIControlStateNormal];
+ [_cameraSwitchButton addTarget:self
+ action:@selector(onCameraSwitch:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_cameraSwitchButton];
+
+ _hangupButton = [UIButton buttonWithType:UIButtonTypeCustom];
+ _hangupButton.backgroundColor = [UIColor redColor];
+ _hangupButton.layer.cornerRadius = kButtonSize / 2;
+ _hangupButton.layer.masksToBounds = YES;
+ image = [UIImage imageForName:@"ic_call_end_black_24dp.png"
+ color:[UIColor whiteColor]];
+ [_hangupButton setImage:image forState:UIControlStateNormal];
+ [_hangupButton addTarget:self
+ action:@selector(onHangup:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [self addSubview:_hangupButton];
+
+ _statusLabel = [[UILabel alloc] initWithFrame:CGRectZero];
+ _statusLabel.font = [UIFont fontWithName:@"Roboto" size:16];
+ _statusLabel.textColor = [UIColor whiteColor];
+ [self addSubview:_statusLabel];
+
+ UITapGestureRecognizer *tapRecognizer =
+ [[UITapGestureRecognizer alloc]
+ initWithTarget:self
+ action:@selector(didTripleTap:)];
+ tapRecognizer.numberOfTapsRequired = 3;
+ [self addGestureRecognizer:tapRecognizer];
+ }
+ return self;
+}
+
+- (void)layoutSubviews {
+ CGRect bounds = self.bounds;
+ if (_remoteVideoSize.width > 0 && _remoteVideoSize.height > 0) {
+ // Aspect fill remote video into bounds.
+ CGRect remoteVideoFrame =
+ AVMakeRectWithAspectRatioInsideRect(_remoteVideoSize, bounds);
+ CGFloat scale = 1;
+ if (remoteVideoFrame.size.width > remoteVideoFrame.size.height) {
+ // Scale by height.
+ scale = bounds.size.height / remoteVideoFrame.size.height;
+ } else {
+ // Scale by width.
+ scale = bounds.size.width / remoteVideoFrame.size.width;
+ }
+ remoteVideoFrame.size.height *= scale;
+ remoteVideoFrame.size.width *= scale;
+ _remoteVideoView.frame = remoteVideoFrame;
+ _remoteVideoView.center =
+ CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
+ } else {
+ _remoteVideoView.frame = bounds;
+ }
+
+ // Aspect fit local video view into a square box.
+ CGRect localVideoFrame =
+ CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+ // Place the view in the bottom right.
+ localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+ - localVideoFrame.size.width - kLocalVideoViewPadding;
+ localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+ - localVideoFrame.size.height - kLocalVideoViewPadding;
+ _localVideoView.frame = localVideoFrame;
+
+ // Place stats at the top.
+ CGSize statsSize = [_statsView sizeThatFits:bounds.size];
+ _statsView.frame = CGRectMake(CGRectGetMinX(bounds),
+ CGRectGetMinY(bounds) + kStatusBarHeight,
+ statsSize.width, statsSize.height);
+
+ // Place hangup button in the bottom left.
+ _hangupButton.frame =
+ CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
+ CGRectGetMaxY(bounds) - kButtonPadding -
+ kButtonSize,
+ kButtonSize,
+ kButtonSize);
+
+  // Place the camera switch button to the right of the hangup button.
+ CGRect cameraSwitchFrame = _hangupButton.frame;
+ cameraSwitchFrame.origin.x =
+ CGRectGetMaxX(cameraSwitchFrame) + kButtonPadding;
+ _cameraSwitchButton.frame = cameraSwitchFrame;
+
+  // Place the route change button to the right of the camera switch button.
+ CGRect routeChangeFrame = _cameraSwitchButton.frame;
+ routeChangeFrame.origin.x =
+ CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
+ _routeChangeButton.frame = routeChangeFrame;
+
+ [_statusLabel sizeToFit];
+ _statusLabel.center =
+ CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)
+
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+ if (videoView == _remoteVideoView) {
+ _remoteVideoSize = size;
+ }
+ [self setNeedsLayout];
+}
+
+#pragma mark - Private
+
+- (void)onCameraSwitch:(UIButton *)sender {
+ sender.enabled = false;
+ [_delegate videoCallView:self
+ shouldSwitchCameraWithCompletion:^(NSError *error) {
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ sender.enabled = true;
+ });
+ }];
+}
+
+- (void)onRouteChange:(UIButton *)sender {
+ sender.enabled = false;
+ __weak ARDVideoCallView *weakSelf = self;
+ [_delegate videoCallView:self
+ shouldChangeRouteWithCompletion:^(void) {
+ ARDVideoCallView *strongSelf = weakSelf;
+ if (strongSelf) {
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ sender.enabled = true;
+ });
+ }
+ }];
+}
+
+- (void)onHangup:(id)sender {
+ [_delegate videoCallViewDidHangup:self];
+}
+
+- (void)didTripleTap:(UITapGestureRecognizer *)recognizer {
+ [_delegate videoCallViewDidEnableStats:self];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h
new file mode 100644
index 0000000000..bdb8747524
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@class ARDVideoCallViewController;
+@protocol ARDVideoCallViewControllerDelegate <NSObject>
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController;
+
+@end
+
+@interface ARDVideoCallViewController : UIViewController
+
+@property(nonatomic, weak) id<ARDVideoCallViewControllerDelegate> delegate;
+
+- (instancetype)initForRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
new file mode 100644
index 0000000000..a82d90b290
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDVideoCallViewController.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSession.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/helpers/RTCDispatcher.h"
+
+#import "ARDAppClient.h"
+#import "ARDCaptureController.h"
+#import "ARDFileCaptureController.h"
+#import "ARDSettingsModel.h"
+#import "ARDVideoCallView.h"
+
+@interface ARDVideoCallViewController () <ARDAppClientDelegate,
+ ARDVideoCallViewDelegate,
+ RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
+@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
+@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
+@end
+
+@implementation ARDVideoCallViewController {
+ ARDAppClient *_client;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
+ ARDCaptureController *_captureController;
+ ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
+}
+
+@synthesize videoCallView = _videoCallView;
+@synthesize remoteVideoTrack = _remoteVideoTrack;
+@synthesize delegate = _delegate;
+@synthesize portOverride = _portOverride;
+
+- (instancetype)initForRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate {
+ if (self = [super init]) {
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+ _delegate = delegate;
+
+ _client = [[ARDAppClient alloc] initWithDelegate:self];
+ [_client connectToRoomWithId:room settings:settingsModel isLoopback:isLoopback];
+ }
+ return self;
+}
+
+- (void)loadView {
+ _videoCallView = [[ARDVideoCallView alloc] initWithFrame:CGRectZero];
+ _videoCallView.delegate = self;
+ _videoCallView.statusLabel.text =
+ [self statusTextForState:RTCIceConnectionStateNew];
+ self.view = _videoCallView;
+
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session addDelegate:self];
+}
+
+- (UIInterfaceOrientationMask)supportedInterfaceOrientations {
+ return UIInterfaceOrientationMaskAll;
+}
+
+#pragma mark - ARDAppClientDelegate
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeState:(ARDAppClientState)state {
+ switch (state) {
+ case kARDAppClientStateConnected:
+ RTCLog(@"Client connected.");
+ break;
+ case kARDAppClientStateConnecting:
+ RTCLog(@"Client connecting.");
+ break;
+ case kARDAppClientStateDisconnected:
+ RTCLog(@"Client disconnected.");
+ [self hangup];
+ break;
+ }
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeConnectionState:(RTCIceConnectionState)state {
+ RTCLog(@"ICE state changed: %ld", (long)state);
+ __weak ARDVideoCallViewController *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDVideoCallViewController *strongSelf = weakSelf;
+ strongSelf.videoCallView.statusLabel.text =
+ [strongSelf statusTextForState:state];
+ });
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+ _videoCallView.localVideoView.captureSession = localCapturer.captureSession;
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+ _captureController =
+ [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel];
+ [_captureController startCapture];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
+ [_fileCaptureController startCapture];
+ }
+#endif
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+ self.remoteVideoTrack = remoteVideoTrack;
+ __weak ARDVideoCallViewController *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ ARDVideoCallViewController *strongSelf = weakSelf;
+ strongSelf.videoCallView.statusLabel.hidden = YES;
+ });
+}
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+ _videoCallView.statsView.stats = stats;
+ [_videoCallView setNeedsLayout];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didError:(NSError *)error {
+ NSString *message =
+ [NSString stringWithFormat:@"%@", error.localizedDescription];
+ [self hangup];
+ [self showAlertWithMessage:message];
+}
+
+#pragma mark - ARDVideoCallViewDelegate
+
+- (void)videoCallViewDidHangup:(ARDVideoCallView *)view {
+ [self hangup];
+}
+
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldSwitchCameraWithCompletion:(void (^)(NSError *))completion {
+ [_captureController switchCamera:completion];
+}
+
+- (void)videoCallView:(ARDVideoCallView *)view
+ shouldChangeRouteWithCompletion:(void (^)(void))completion {
+ NSParameterAssert(completion);
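+  // Toggle the audio output between the default route and the speaker.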
+ AVAudioSessionPortOverride override = AVAudioSessionPortOverrideNone;
+ if (_portOverride == AVAudioSessionPortOverrideNone) {
+ override = AVAudioSessionPortOverrideSpeaker;
+ }
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
+ block:^{
+ RTC_OBJC_TYPE(RTCAudioSession) *session =
+ [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ NSError *error = nil;
+ if ([session overrideOutputAudioPort:override
+ error:&error]) {
+ self.portOverride = override;
+ } else {
+ RTCLogError(@"Error overriding output port: %@",
+ error.localizedDescription);
+ }
+ [session unlockForConfiguration];
+ completion();
+ }];
+}
+
+- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
+ _client.shouldGetStats = YES;
+ _videoCallView.statsView.hidden = NO;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
+ RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches);
+}
+
+#pragma mark - Private
+
+- (void)setRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+ if (_remoteVideoTrack == remoteVideoTrack) {
+ return;
+ }
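+  // Detach the renderer from the previous track before attaching it to the new one.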
+ [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
+ _remoteVideoTrack = nil;
+ [_videoCallView.remoteVideoView renderFrame:nil];
+ _remoteVideoTrack = remoteVideoTrack;
+ [_remoteVideoTrack addRenderer:_videoCallView.remoteVideoView];
+}
+
+- (void)hangup {
+ self.remoteVideoTrack = nil;
+ _videoCallView.localVideoView.captureSession = nil;
+ [_captureController stopCapture];
+ _captureController = nil;
+ [_fileCaptureController stopCapture];
+ _fileCaptureController = nil;
+ [_client disconnect];
+ [_delegate viewControllerDidFinish:self];
+}
+
+- (NSString *)statusTextForState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ case RTCIceConnectionStateChecking:
+ return @"Connecting...";
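+    // All other states clear the status label.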
+ case RTCIceConnectionStateConnected:
+ case RTCIceConnectionStateCompleted:
+ case RTCIceConnectionStateFailed:
+ case RTCIceConnectionStateDisconnected:
+ case RTCIceConnectionStateClosed:
+ case RTCIceConnectionStateCount:
+ return nil;
+ }
+}
+
+- (void)showAlertWithMessage:(NSString*)message {
+ UIAlertController *alert =
+ [UIAlertController alertControllerWithTitle:nil
+ message:message
+ preferredStyle:UIAlertControllerStyleAlert];
+
+ UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+ style:UIAlertActionStyleDefault
+ handler:^(UIAlertAction *action){
+ }];
+
+ [alert addAction:defaultAction];
+ [self presentViewController:alert animated:YES completion:nil];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist
new file mode 100644
index 0000000000..a2f0a683ed
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/Info.plist
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>12E55</string>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIcons</key>
+ <dict>
+ <key>CFBundlePrimaryIcon</key>
+ <dict>
+ <key>CFBundleIconFiles</key>
+ <array>
+ <string>Icon.png</string>
+ <string>Icon-120.png</string>
+ <string>Icon-180.png</string>
+ </array>
+ </dict>
+ </dict>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleSupportedPlatforms</key>
+ <array>
+ <string>iPhoneOS</string>
+ </array>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>UIStatusBarTintParameters</key>
+ <dict>
+ <key>UINavigationBar</key>
+ <dict>
+ <key>Style</key>
+ <string>UIBarStyleDefault</string>
+ <key>Translucent</key>
+ <false/>
+ </dict>
+ </dict>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ </array>
+ <key>UIAppFonts</key>
+ <array>
+ <string>Roboto-Regular.ttf</string>
+ </array>
+ <key>UIBackgroundModes</key>
+ <array>
+ <string>audio</string>
+ <string>voip</string>
+ </array>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+ <key>UIFileSharingEnabled</key>
+ <true/>
+ <key>UILaunchImages</key>
+ <array>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone5</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{320, 568}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{375, 667}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6p</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{414, 736}</string>
+ </dict>
+ </array>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h
new file mode 100644
index 0000000000..3a93c253b2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "sdk/objc/base/RTCVideoCodecInfo.h"
+
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)
+
+ - (NSString *)humanReadableDescription;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m
new file mode 100644
index 0000000000..5e0c52c5c4
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo+HumanReadable.h"
+
+#import "sdk/objc/components/video_codec/RTCH264ProfileLevelId.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)
+
+ - (NSString *)humanReadableDescription {
+ if ([self.name isEqualToString:@"H264"]) {
+ NSString *profileId = self.parameters[@"profile-level-id"];
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
+ if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
+ profileLevelId.profile == RTCH264ProfileHigh) {
+ return @"H264 (High)";
+ } else if (profileLevelId.profile == RTCH264ProfileConstrainedBaseline ||
+ profileLevelId.profile == RTCH264ProfileBaseline) {
+ return @"H264 (Baseline)";
+ } else {
+ return [NSString stringWithFormat:@"H264 (%@)", profileId];
+ }
+ } else {
+ return self.name;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h
new file mode 100644
index 0000000000..d56ba02c2e
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface UIImage (ARDUtilities)
+
+// Returns a color-tinted version of the named image resource.
++ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m
new file mode 100644
index 0000000000..1bbe8c342f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/UIImage+ARDUtilities.m
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIImage+ARDUtilities.h"
+
+@implementation UIImage (ARDUtilities)
+
++ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color {
+ UIImage *image = [UIImage imageNamed:name];
+ if (!image) {
+ return nil;
+ }
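+  // Fill the context with the tint color, then draw the image with destination-in
+  // blending so the image's alpha channel masks the fill.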
+ UIGraphicsBeginImageContextWithOptions(image.size, NO, 0.0f);
+ [color setFill];
+ CGRect bounds = CGRectMake(0, 0, image.size.width, image.size.height);
+ UIRectFill(bounds);
+ [image drawInRect:bounds blendMode:kCGBlendModeDestinationIn alpha:1.0f];
+ UIImage *coloredImage = UIGraphicsGetImageFromCurrentImageContext();
+ UIGraphicsEndImageContext();
+
+ return coloredImage;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h
new file mode 100644
index 0000000000..2c4a56368a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <ReplayKit/ReplayKit.h>
+
+#import "sdk/objc/base/RTCLogging.h"
+
+#import "ARDAppClient.h"
+
+@protocol ARDExternalSampleDelegate;
+
+API_AVAILABLE(ios(10.0))
+@interface ARDBroadcastSampleHandler : RPBroadcastSampleHandler <ARDAppClientDelegate>
+
+@property(nonatomic, strong) id<ARDExternalSampleDelegate> capturer;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
new file mode 100644
index 0000000000..1c276d965f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDBroadcastSampleHandler.h"
+
+#import <os/log.h>
+
+#import "ARDExternalSampleCapturer.h"
+#import "ARDSettingsModel.h"
+
+#import "sdk/objc/api/logging/RTCCallbackLogger.h"
+#import "sdk/objc/base/RTCLogging.h"
+
+@implementation ARDBroadcastSampleHandler {
+ ARDAppClient *_client;
+ RTC_OBJC_TYPE(RTCCallbackLogger) * _callbackLogger;
+}
+
+@synthesize capturer = _capturer;
+
+- (instancetype)init {
+ if (self = [super init]) {
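+    // Forward WebRTC log messages to os_log so they appear in the extension's console output.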
+ _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
+ os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
+ [_callbackLogger start:^(NSString *logMessage) {
+ os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
+ }];
+ }
+ return self;
+}
+
+- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
+  // The user has requested to start the broadcast. Setup info from the UI
+  // extension may be supplied, but it is optional.
+ ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
+
+ _client = [[ARDAppClient alloc] initWithDelegate:self];
+ _client.broadcast = YES;
+
+ NSString *roomName = nil;
+ if (setupInfo[@"roomName"]) {
+ roomName = (NSString *)setupInfo[@"roomName"];
+ } else {
+ u_int32_t randomRoomSuffix = arc4random_uniform(1000);
+ roomName = [NSString stringWithFormat:@"broadcast_%d", randomRoomSuffix];
+ }
+ [_client connectToRoomWithId:roomName settings:settingsModel isLoopback:NO];
+ RTCLog(@"Broadcast started.");
+}
+
+- (void)broadcastPaused {
+ // User has requested to pause the broadcast. Samples will stop being delivered.
+}
+
+- (void)broadcastResumed {
+  // User has requested to resume the broadcast. Sample delivery will resume.
+}
+
+- (void)broadcastFinished {
+ // User has requested to finish the broadcast.
+ [_client disconnect];
+}
+
+- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ withType:(RPSampleBufferType)sampleBufferType {
+ switch (sampleBufferType) {
+ case RPSampleBufferTypeVideo:
+ [self.capturer didCaptureSampleBuffer:sampleBuffer];
+ break;
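+    // Audio samples (app and mic) are not forwarded in this example.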
+ case RPSampleBufferTypeAudioApp:
+ break;
+ case RPSampleBufferTypeAudioMic:
+ break;
+ default:
+ break;
+ }
+}
+
+#pragma mark - ARDAppClientDelegate
+
+- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
+ switch (state) {
+ case kARDAppClientStateConnected:
+ RTCLog(@"Client connected.");
+ break;
+ case kARDAppClientStateConnecting:
+      RTCLog(@"Client connecting.");
+ break;
+ case kARDAppClientStateDisconnected:
+ RTCLog(@"Client disconnected.");
+ break;
+ }
+}
+
+- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state {
+ RTCLog(@"ICE state changed: %ld", (long)state);
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer {
+ self.capturer = externalSampleCapturer;
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+}
+
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+}
+
+- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
+ RTCLog(@"Error: %@", error);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h
new file mode 100644
index 0000000000..bbf397d8a9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <ReplayKit/ReplayKit.h>
+#import <UIKit/UIKit.h>
+
+API_AVAILABLE(ios(11.0))
+@interface ARDBroadcastSetupViewController : UIViewController <UITextFieldDelegate>
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m
new file mode 100644
index 0000000000..55438f17d8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ARDBroadcastSetupViewController.h"
+
+@implementation ARDBroadcastSetupViewController {
+ UITextField *_roomNameField;
+}
+
+- (void)loadView {
+ UIView *view = [[UIView alloc] initWithFrame:CGRectZero];
+ view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7];
+
+ UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
+ imageView.translatesAutoresizingMaskIntoConstraints = NO;
+ [view addSubview:imageView];
+
+ _roomNameField = [[UITextField alloc] initWithFrame:CGRectZero];
+ _roomNameField.borderStyle = UITextBorderStyleRoundedRect;
+ _roomNameField.font = [UIFont systemFontOfSize:14.0];
+ _roomNameField.translatesAutoresizingMaskIntoConstraints = NO;
+ _roomNameField.placeholder = @"Room name";
+ _roomNameField.returnKeyType = UIReturnKeyDone;
+ _roomNameField.delegate = self;
+ [view addSubview:_roomNameField];
+
+ UIButton *doneButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ doneButton.translatesAutoresizingMaskIntoConstraints = NO;
+ doneButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
+ [doneButton setTitle:@"Done" forState:UIControlStateNormal];
+ [doneButton addTarget:self
+ action:@selector(userDidFinishSetup)
+ forControlEvents:UIControlEventTouchUpInside];
+ [view addSubview:doneButton];
+
+ UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ cancelButton.translatesAutoresizingMaskIntoConstraints = NO;
+ cancelButton.titleLabel.font = [UIFont systemFontOfSize:20.0];
+ [cancelButton setTitle:@"Cancel" forState:UIControlStateNormal];
+ [cancelButton addTarget:self
+ action:@selector(userDidCancelSetup)
+ forControlEvents:UIControlEventTouchUpInside];
+ [view addSubview:cancelButton];
+
+ UILayoutGuide *margin = view.layoutMarginsGuide;
+ [imageView.widthAnchor constraintEqualToConstant:60.0].active = YES;
+ [imageView.heightAnchor constraintEqualToConstant:60.0].active = YES;
+ [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20].active = YES;
+ [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES;
+
+ [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+ [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active =
+ YES;
+ [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+
+ [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+ [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
+
+ [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+ [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
+
+ UITapGestureRecognizer *tgr =
+ [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
+ [view addGestureRecognizer:tgr];
+
+ self.view = view;
+}
+
+- (IBAction)didTap:(id)sender {
+ [self.view endEditing:YES];
+}
+
+- (void)userDidFinishSetup {
+  // URL of the resource where the broadcast can be viewed; returned to the host application.
+ NSURL *broadcastURL = [NSURL
+ URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]];
+
+  // Dictionary with setup information that will be provided to the broadcast
+  // extension when the broadcast is started.
+ NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text};
+
+  // Tell ReplayKit that the extension is finished setting up and can begin broadcasting.
+ [self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo];
+}
+
+- (void)userDidCancelSetup {
+ // Tell ReplayKit that the extension was cancelled by the user
+ [self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
+ code:-1
+ userInfo:nil]];
+}
+
+#pragma mark - UITextFieldDelegate
+
+- (BOOL)textFieldShouldReturn:(UITextField *)textField {
+ [self userDidFinishSetup];
+ return YES;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist
new file mode 100644
index 0000000000..a123c111e5
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile.BroadcastSetupUI</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionAttributes</key>
+ <dict>
+ <key>NSExtensionActivationRule</key>
+ <dict>
+ <key>NSExtensionActivationSupportsReplayKitStreaming</key>
+ <true/>
+ </dict>
+ </dict>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.broadcast-services-setupui</string>
+ <key>NSExtensionPrincipalClass</key>
+ <string>ARDBroadcastSetupViewController</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist
new file mode 100644
index 0000000000..2bab60ea8f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/broadcast_extension/BroadcastUploadInfo.plist
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCMobile</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCMobile.BroadcastUpload</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.broadcast-services-upload</string>
+ <key>NSExtensionPrincipalClass</key>
+ <string>ARDBroadcastSampleHandler</string>
+ <key>RPBroadcastProcessMode</key>
+ <string>RPBroadcastProcessModeSampleBuffer</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m
new file mode 100644
index 0000000000..00b83f7fd2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/main.m
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#import "ARDAppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(
+ argc, argv, nil, NSStringFromClass([ARDAppDelegate class]));
+ }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf
new file mode 100644
index 0000000000..0e58508a64
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/Roboto-Regular.ttf
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4 b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4
new file mode 100644
index 0000000000..ccffbf4722
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/foreman.mp4
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png
new file mode 100644
index 0000000000..9d005fde06
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone5@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png
new file mode 100644
index 0000000000..fce3eb95b3
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png
new file mode 100644
index 0000000000..aee20c2209
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/iPhone6p@3x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png
new file mode 100644
index 0000000000..531cb0f280
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png
new file mode 100644
index 0000000000..03dd381c10
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_call_end_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png
new file mode 100644
index 0000000000..4ebf8a2270
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png
new file mode 100644
index 0000000000..ed2b2525fd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_clear_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png
new file mode 100644
index 0000000000..c59419c02b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png
new file mode 100644
index 0000000000..e84e188a1d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_settings_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png
new file mode 100644
index 0000000000..8f3343d3a7
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png
new file mode 100644
index 0000000000..764880467a
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_surround_sound_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png
new file mode 100644
index 0000000000..85271c8253
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png
new file mode 100644
index 0000000000..62b13a6a09
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/ic_switch_video_black_24dp@2x.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3 b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3
new file mode 100644
index 0000000000..5981ba3a91
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/ios/resources/mozart.mp3
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h
new file mode 100644
index 0000000000..2b3ce094a2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.h
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+@interface APPRTCAppDelegate : NSObject <NSApplicationDelegate>
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m
new file mode 100644
index 0000000000..36a470021d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "APPRTCAppDelegate.h"
+#import "APPRTCViewController.h"
+#import "sdk/objc/api/peerconnection/RTCSSLAdapter.h"
+
+@interface APPRTCAppDelegate () <NSWindowDelegate>
+@end
+
+@implementation APPRTCAppDelegate {
+ APPRTCViewController* _viewController;
+ NSWindow* _window;
+}
+
+#pragma mark - NSApplicationDelegate
+
+- (void)applicationDidFinishLaunching:(NSNotification*)notification {
+ RTCInitializeSSL();
+ NSScreen* screen = [NSScreen mainScreen];
+ NSRect visibleRect = [screen visibleFrame];
+ NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
+ NSMidY(visibleRect),
+ 1320,
+ 1140);
+ NSUInteger styleMask = NSWindowStyleMaskTitled | NSWindowStyleMaskClosable;
+ _window = [[NSWindow alloc] initWithContentRect:windowRect
+ styleMask:styleMask
+ backing:NSBackingStoreBuffered
+ defer:NO];
+ _window.delegate = self;
+ [_window makeKeyAndOrderFront:self];
+ [_window makeMainWindow];
+ _viewController = [[APPRTCViewController alloc] initWithNibName:nil
+ bundle:nil];
+ [_window setContentView:[_viewController view]];
+}
+
+#pragma mark - NSWindow
+
+- (void)windowWillClose:(NSNotification*)notification {
+ [_viewController windowWillClose:notification];
+ RTCCleanupSSL();
+ [NSApp terminate:self];
+}
+
+@end
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h
new file mode 100644
index 0000000000..306ecd9c7f
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+@interface APPRTCViewController : NSViewController
+
+- (void)windowWillClose:(NSNotification*)notification;
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
new file mode 100644
index 0000000000..982fa56b43
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
@@ -0,0 +1,407 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "APPRTCViewController.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
+#import "sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h"
+
+#import "ARDAppClient.h"
+#import "ARDCaptureController.h"
+#import "ARDSettingsModel.h"
+
+static NSUInteger const kContentWidth = 900;
+static NSUInteger const kRoomFieldWidth = 200;
+static NSUInteger const kActionItemHeight = 30;
+static NSUInteger const kBottomViewHeight = 200;
+
+@class APPRTCMainView;
+@protocol APPRTCMainViewDelegate
+
+- (void)appRTCMainView:(APPRTCMainView*)mainView
+ didEnterRoomId:(NSString*)roomId
+ loopback:(BOOL)isLoopback;
+
+@end
+
+@interface APPRTCMainView : NSView
+
+@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
+@property(nonatomic, readonly) NSTextView* logView;
+
+- (void)displayLogMessage:(NSString*)message;
+
+@end
+
+@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@end
+@implementation APPRTCMainView {
+ NSScrollView* _scrollView;
+ NSView* _actionItemsView;
+ NSButton* _connectButton;
+ NSButton* _loopbackButton;
+ NSTextField* _roomField;
+ CGSize _localVideoSize;
+ CGSize _remoteVideoSize;
+}
+
+@synthesize delegate = _delegate;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize logView = _logView;
+
+- (void)displayLogMessage:(NSString *)message {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
+ NSRange range = NSMakeRange(self.logView.string.length, 0);
+ [self.logView scrollRangeToVisible:range];
+ });
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithFrame:(NSRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ [self setupViews];
+ }
+ return self;
+}
+
++ (BOOL)requiresConstraintBasedLayout {
+ return YES;
+}
+
+- (void)updateConstraints {
+ NSParameterAssert(
+ _roomField != nil &&
+ _scrollView != nil &&
+ _remoteVideoView != nil &&
+ _localVideoView != nil &&
+ _actionItemsView != nil &&
+ _connectButton != nil &&
+ _loopbackButton != nil);
+
+ [self removeConstraints:[self constraints]];
+ NSDictionary* viewsDictionary =
+ NSDictionaryOfVariableBindings(_roomField,
+ _scrollView,
+ _remoteVideoView,
+ _localVideoView,
+ _actionItemsView,
+ _connectButton,
+ _loopbackButton);
+
+ NSSize remoteViewSize = [self remoteVideoViewSize];
+ NSDictionary* metrics = @{
+ @"remoteViewWidth" : @(remoteViewSize.width),
+ @"remoteViewHeight" : @(remoteViewSize.height),
+ @"kBottomViewHeight" : @(kBottomViewHeight),
+ @"localViewHeight" : @(remoteViewSize.height / 3),
+ @"localViewWidth" : @(remoteViewSize.width / 3),
+ @"kRoomFieldWidth" : @(kRoomFieldWidth),
+ @"kActionItemHeight" : @(kActionItemHeight)
+ };
+ // Declare this separately to avoid compiler warning about splitting string
+ // within an NSArray expression.
+ NSString* verticalConstraintLeft =
+ @"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|";
+ NSString* verticalConstraintRight =
+ @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|";
+ NSArray* constraintFormats = @[
+ verticalConstraintLeft,
+ verticalConstraintRight,
+ @"H:|-[_remoteVideoView(remoteViewWidth)]-|",
+ @"V:|-[_localVideoView(localViewHeight)]",
+ @"H:|-[_localVideoView(localViewWidth)]",
+ @"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|"
+ ];
+
+ NSArray* actionItemsConstraints = @[
+ @"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]",
+ @"H:|-[_connectButton(kRoomFieldWidth)]",
+ @"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]",
+ @"V:|-[_loopbackButton(kActionItemHeight)]",
+ ];
+
+ [APPRTCMainView addConstraints:constraintFormats
+ toView:self
+ viewsDictionary:viewsDictionary
+ metrics:metrics];
+ [APPRTCMainView addConstraints:actionItemsConstraints
+ toView:_actionItemsView
+ viewsDictionary:viewsDictionary
+ metrics:metrics];
+ [super updateConstraints];
+}
+
+#pragma mark - Constraints helper
+
++ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view
+ viewsDictionary:(NSDictionary*)viewsDictionary
+ metrics:(NSDictionary*)metrics {
+ for (NSString* constraintFormat in constraints) {
+ NSArray* constraints =
+ [NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
+ options:0
+ metrics:metrics
+ views:viewsDictionary];
+ for (NSLayoutConstraint* constraint in constraints) {
+ [view addConstraint:constraint];
+ }
+ }
+}
+
+#pragma mark - Control actions
+
+- (void)startCall:(id)sender {
+ NSString* roomString = _roomField.stringValue;
+ // Generate room id for loopback options.
+ if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
+ roomString = [NSUUID UUID].UUIDString;
+ roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+ }
+ [self.delegate appRTCMainView:self
+ didEnterRoomId:roomString
+ loopback:_loopbackButton.intValue];
+ [self setNeedsUpdateConstraints:YES];
+}
+
+#pragma mark - RTCVideoViewDelegate
+
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+ if (videoView == _remoteVideoView) {
+ _remoteVideoSize = size;
+ } else if (videoView == _localVideoView) {
+ _localVideoSize = size;
+ } else {
+ return;
+ }
+
+ [self setNeedsUpdateConstraints:YES];
+}
+
+#pragma mark - Private
+
+- (void)setupViews {
+ NSParameterAssert([[self subviews] count] == 0);
+
+ _logView = [[NSTextView alloc] initWithFrame:NSZeroRect];
+ [_logView setMinSize:NSMakeSize(0, kBottomViewHeight)];
+ [_logView setMaxSize:NSMakeSize(FLT_MAX, FLT_MAX)];
+ [_logView setVerticallyResizable:YES];
+ [_logView setAutoresizingMask:NSViewWidthSizable];
+ NSTextContainer* textContainer = [_logView textContainer];
+ NSSize containerSize = NSMakeSize(kContentWidth, FLT_MAX);
+ [textContainer setContainerSize:containerSize];
+ [textContainer setWidthTracksTextView:YES];
+ [_logView setEditable:NO];
+
+ [self setupActionItemsView];
+
+ _scrollView = [[NSScrollView alloc] initWithFrame:NSZeroRect];
+ [_scrollView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [_scrollView setHasVerticalScroller:YES];
+ [_scrollView setDocumentView:_logView];
+ [self addSubview:_scrollView];
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+ _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+
+ [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [self addSubview:_remoteVideoView];
+ [_localVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [self addSubview:_localVideoView];
+}
+
+- (void)setupActionItemsView {
+ _actionItemsView = [[NSView alloc] initWithFrame:NSZeroRect];
+ [_actionItemsView setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [self addSubview:_actionItemsView];
+
+ _roomField = [[NSTextField alloc] initWithFrame:NSZeroRect];
+ [_roomField setTranslatesAutoresizingMaskIntoConstraints:NO];
+ [[_roomField cell] setPlaceholderString: @"Enter AppRTC room id"];
+ [_actionItemsView addSubview:_roomField];
+ [_roomField setEditable:YES];
+
+ _connectButton = [[NSButton alloc] initWithFrame:NSZeroRect];
+ [_connectButton setTranslatesAutoresizingMaskIntoConstraints:NO];
+ _connectButton.title = @"Start call";
+ _connectButton.bezelStyle = NSBezelStyleSmallSquare;
+ _connectButton.target = self;
+ _connectButton.action = @selector(startCall:);
+ [_actionItemsView addSubview:_connectButton];
+
+ _loopbackButton = [[NSButton alloc] initWithFrame:NSZeroRect];
+ [_loopbackButton setTranslatesAutoresizingMaskIntoConstraints:NO];
+ _loopbackButton.title = @"Loopback";
+ [_loopbackButton setButtonType:NSButtonTypeSwitch];
+ [_actionItemsView addSubview:_loopbackButton];
+}
+
+- (NSSize)remoteVideoViewSize {
+ if (!_remoteVideoView.bounds.size.width) {
+ return NSMakeSize(kContentWidth, 0);
+ }
+ NSInteger width = MAX(_remoteVideoView.bounds.size.width, kContentWidth);
+ NSInteger height = (width/16) * 9;
+ return NSMakeSize(width, height);
+}
+
+@end
+
+@interface APPRTCViewController ()
+ <ARDAppClientDelegate, APPRTCMainViewDelegate>
+@property(nonatomic, readonly) APPRTCMainView* mainView;
+@end
+
+@implementation APPRTCViewController {
+ ARDAppClient* _client;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
+ RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
+ ARDCaptureController* _captureController;
+}
+
+- (void)dealloc {
+ [self disconnect];
+}
+
+- (void)viewDidAppear {
+ [super viewDidAppear];
+ [self displayUsageInstructions];
+}
+
+- (void)loadView {
+ APPRTCMainView* view = [[APPRTCMainView alloc] initWithFrame:NSZeroRect];
+ [view setTranslatesAutoresizingMaskIntoConstraints:NO];
+ view.delegate = self;
+ self.view = view;
+}
+
+- (void)windowWillClose:(NSNotification*)notification {
+ [self disconnect];
+}
+
+#pragma mark - Usage
+
+- (void)displayUsageInstructions {
+ [self.mainView displayLogMessage:
+ @"To start call:\n"
+ @"• Enter AppRTC room id (not neccessary for loopback)\n"
+ @"• Start call"];
+}
+
+#pragma mark - ARDAppClientDelegate
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeState:(ARDAppClientState)state {
+ switch (state) {
+ case kARDAppClientStateConnected:
+ [self.mainView displayLogMessage:@"Client connected."];
+ break;
+ case kARDAppClientStateConnecting:
+ [self.mainView displayLogMessage:@"Client connecting."];
+ break;
+ case kARDAppClientStateDisconnected:
+ [self.mainView displayLogMessage:@"Client disconnected."];
+ [self resetUI];
+ _client = nil;
+ break;
+ }
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didChangeConnectionState:(RTCIceConnectionState)state {
+}
+
+- (void)appClient:(ARDAppClient*)client
+ didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+ _captureController =
+ [[ARDCaptureController alloc] initWithCapturer:localCapturer
+ settings:[[ARDSettingsModel alloc] init]];
+ [_captureController startCapture];
+}
+
+- (void)appClient:(ARDAppClient*)client
+ didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
+ _localVideoTrack = localVideoTrack;
+ [_localVideoTrack addRenderer:self.mainView.localVideoView];
+}
+
+- (void)appClient:(ARDAppClient*)client
+ didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+ _remoteVideoTrack = remoteVideoTrack;
+ [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didError:(NSError *)error {
+ [self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]];
+ [self disconnect];
+}
+
+- (void)appClient:(ARDAppClient *)client
+ didGetStats:(NSArray *)stats {
+}
+
+#pragma mark - APPRTCMainViewDelegate
+
+- (void)appRTCMainView:(APPRTCMainView*)mainView
+ didEnterRoomId:(NSString*)roomId
+ loopback:(BOOL)isLoopback {
+
+ if ([roomId isEqualToString:@""]) {
+ [self.mainView displayLogMessage:@"Missing room id"];
+ return;
+ }
+
+ [self disconnect];
+ ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
+ [client connectToRoomWithId:roomId
+ settings:[[ARDSettingsModel alloc] init] // Use default settings.
+ isLoopback:isLoopback];
+ _client = client;
+}
+
+#pragma mark - Private
+
+- (APPRTCMainView*)mainView {
+ return (APPRTCMainView*)self.view;
+}
+
+- (void)showAlertWithMessage:(NSString*)message {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ NSAlert* alert = [[NSAlert alloc] init];
+ [alert setMessageText:message];
+ [alert runModal];
+ });
+}
+
+- (void)resetUI {
+ [_remoteVideoTrack removeRenderer:self.mainView.remoteVideoView];
+ [_localVideoTrack removeRenderer:self.mainView.localVideoView];
+ _remoteVideoTrack = nil;
+ _localVideoTrack = nil;
+ [self.mainView.remoteVideoView renderFrame:nil];
+ [self.mainView.localVideoView renderFrame:nil];
+}
+
+- (void)disconnect {
+ [self resetUI];
+ [_captureController stopCapture];
+ _captureController = nil;
+ [_client disconnect];
+}
+
+@end
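
The APPRTCMainView above routes every layout rule through a single class helper that expands Auto Layout visual-format strings against shared views/metrics dictionaries. As a rough, hypothetical sketch (not part of the patch), an additional rule inside the class could reuse the same helper:

  // Hypothetical extra rule, shown only to illustrate the helper's shape.
  NSDictionary* views = NSDictionaryOfVariableBindings(_connectButton, _loopbackButton);
  NSDictionary* metrics = @{@"kActionItemHeight" : @(kActionItemHeight)};
  [APPRTCMainView addConstraints:@[ @"V:[_connectButton(kActionItemHeight)]-[_loopbackButton]" ]
                          toView:_actionItemsView
                 viewsDictionary:views
                         metrics:metrics];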
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist
new file mode 100644
index 0000000000..d2970eba74
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/Info.plist
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE plist PUBLIC "-//Apple/DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.Google.${PRODUCT_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+</dict>
+</plist> \ No newline at end of file
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m
new file mode 100644
index 0000000000..79b17f5492
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/main.m
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+#import "APPRTCAppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ [NSApplication sharedApplication];
+ APPRTCAppDelegate* delegate = [[APPRTCAppDelegate alloc] init];
+ [NSApp setDelegate:delegate];
+ [NSApp run];
+ }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm
new file mode 100644
index 0000000000..2694e49914
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <QuartzCore/CoreAnimation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/ssl_adapter.h"
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+#import "sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h"
+
+#import "ARDAppClient+Internal.h"
+#import "ARDJoinResponse+Internal.h"
+#import "ARDMessageResponse+Internal.h"
+#import "ARDSettingsModel.h"
+
+@interface ARDAppClientTest : XCTestCase
+@end
+
+@implementation ARDAppClientTest
+
+#pragma mark - Mock helpers
+
+- (id)mockRoomServerClientForRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ isInitiator:(BOOL)isInitiator
+ messages:(NSArray *)messages
+ messageHandler:
+ (void (^)(ARDSignalingMessage *))messageHandler {
+ id mockRoomServerClient =
+ [OCMockObject mockForProtocol:@protocol(ARDRoomServerClient)];
+
+ // Successful join response.
+ ARDJoinResponse *joinResponse = [[ARDJoinResponse alloc] init];
+ joinResponse.result = kARDJoinResultTypeSuccess;
+ joinResponse.roomId = roomId;
+ joinResponse.clientId = clientId;
+ joinResponse.isInitiator = isInitiator;
+ joinResponse.messages = messages;
+
+ // Successful message response.
+ ARDMessageResponse *messageResponse = [[ARDMessageResponse alloc] init];
+ messageResponse.result = kARDMessageResultTypeSuccess;
+
+ // Return join response from above on join.
+ [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+ __unsafe_unretained void (^completionHandler)(ARDJoinResponse *response,
+ NSError *error);
+ [invocation getArgument:&completionHandler atIndex:4];
+ completionHandler(joinResponse, nil);
+ }] joinRoomWithRoomId:roomId isLoopback:NO completionHandler:[OCMArg any]];
+
+ // Return message response from above on join.
+ [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+ __unsafe_unretained ARDSignalingMessage *message;
+ __unsafe_unretained void (^completionHandler)(ARDMessageResponse *response,
+ NSError *error);
+ [invocation getArgument:&message atIndex:2];
+ [invocation getArgument:&completionHandler atIndex:5];
+ messageHandler(message);
+ completionHandler(messageResponse, nil);
+ }] sendMessage:[OCMArg any]
+ forRoomId:roomId
+ clientId:clientId
+ completionHandler:[OCMArg any]];
+
+ // Do nothing on leave.
+ [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
+ __unsafe_unretained void (^completionHandler)(NSError *error);
+ [invocation getArgument:&completionHandler atIndex:4];
+ if (completionHandler) {
+ completionHandler(nil);
+ }
+ }] leaveRoomWithRoomId:roomId
+ clientId:clientId
+ completionHandler:[OCMArg any]];
+
+ return mockRoomServerClient;
+}
+
+- (id)mockSignalingChannelForRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ messageHandler:
+ (void (^)(ARDSignalingMessage *message))messageHandler {
+ id mockSignalingChannel =
+ [OCMockObject niceMockForProtocol:@protocol(ARDSignalingChannel)];
+ [[mockSignalingChannel stub] registerForRoomId:roomId clientId:clientId];
+ [[[mockSignalingChannel stub] andDo:^(NSInvocation *invocation) {
+ __unsafe_unretained ARDSignalingMessage *message;
+ [invocation getArgument:&message atIndex:2];
+ messageHandler(message);
+ }] sendMessage:[OCMArg any]];
+ return mockSignalingChannel;
+}
+
+- (id)mockTURNClient {
+ id mockTURNClient =
+ [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
+ [[[mockTURNClient stub] andDo:^(NSInvocation *invocation) {
+ // Don't return anything in TURN response.
+ __unsafe_unretained void (^completionHandler)(NSArray *turnServers,
+ NSError *error);
+ [invocation getArgument:&completionHandler atIndex:2];
+ completionHandler([NSArray array], nil);
+ }] requestServersWithCompletionHandler:[OCMArg any]];
+ return mockTURNClient;
+}
+
+- (id)mockSettingsModel {
+ ARDSettingsModel *model = [[ARDSettingsModel alloc] init];
+ id partialMock = [OCMockObject partialMockForObject:model];
+ [[[partialMock stub] andReturn:@[ @"640x480", @"960x540", @"1280x720" ]]
+ availableVideoResolutions];
+
+ return model;
+}
+
+- (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId
+ clientId:(NSString *)clientId
+ isInitiator:(BOOL)isInitiator
+ messages:(NSArray *)messages
+ messageHandler:
+ (void (^)(ARDSignalingMessage *message))messageHandler
+ connectedHandler:(void (^)(void))connectedHandler
+ localVideoTrackHandler:(void (^)(void))localVideoTrackHandler {
+ id turnClient = [self mockTURNClient];
+ id signalingChannel = [self mockSignalingChannelForRoomId:roomId
+ clientId:clientId
+ messageHandler:messageHandler];
+ id roomServerClient =
+ [self mockRoomServerClientForRoomId:roomId
+ clientId:clientId
+ isInitiator:isInitiator
+ messages:messages
+ messageHandler:messageHandler];
+ id delegate =
+ [OCMockObject niceMockForProtocol:@protocol(ARDAppClientDelegate)];
+ [[[delegate stub] andDo:^(NSInvocation *invocation) {
+ connectedHandler();
+ }] appClient:[OCMArg any]
+ didChangeConnectionState:RTCIceConnectionStateConnected];
+ [[[delegate stub] andDo:^(NSInvocation *invocation) {
+ localVideoTrackHandler();
+ }] appClient:[OCMArg any]
+ didReceiveLocalVideoTrack:[OCMArg any]];
+
+ return [[ARDAppClient alloc] initWithRoomServerClient:roomServerClient
+ signalingChannel:signalingChannel
+ turnClient:turnClient
+ delegate:delegate];
+}
+
+#pragma mark - Cases
+
+// Tests that an ICE connection is established between two ARDAppClient objects
+// where one is set up as the caller and the other as the answerer. Network
+// components are mocked out and messages are relayed directly from object to
+// object. It's expected that both clients reach the
+// RTCIceConnectionStateConnected state within a reasonable amount of time.
+- (void)testSession {
+ // Need block arguments here because we're setting up callbacks before we
+ // create the clients.
+ ARDAppClient *caller = nil;
+ ARDAppClient *answerer = nil;
+ __block __weak ARDAppClient *weakCaller = nil;
+ __block __weak ARDAppClient *weakAnswerer = nil;
+ NSString *roomId = @"testRoom";
+ NSString *callerId = @"testCallerId";
+ NSString *answererId = @"testAnswererId";
+
+ XCTestExpectation *callerConnectionExpectation =
+ [self expectationWithDescription:@"Caller PC connected"];
+ XCTestExpectation *answererConnectionExpectation =
+ [self expectationWithDescription:@"Answerer PC connected"];
+
+ caller = [self createAppClientForRoomId:roomId
+ clientId:callerId
+ isInitiator:YES
+ messages:[NSArray array]
+ messageHandler:^(ARDSignalingMessage *message) {
+ ARDAppClient *strongAnswerer = weakAnswerer;
+ [strongAnswerer channel:strongAnswerer.channel didReceiveMessage:message];
+ } connectedHandler:^{
+ [callerConnectionExpectation fulfill];
+ } localVideoTrackHandler:^{
+ }];
+ // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
+ // crash in Debug.
+ caller.defaultPeerConnectionConstraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+ optionalConstraints:nil];
+ weakCaller = caller;
+
+ answerer = [self createAppClientForRoomId:roomId
+ clientId:answererId
+ isInitiator:NO
+ messages:[NSArray array]
+ messageHandler:^(ARDSignalingMessage *message) {
+ ARDAppClient *strongCaller = weakCaller;
+ [strongCaller channel:strongCaller.channel didReceiveMessage:message];
+ } connectedHandler:^{
+ [answererConnectionExpectation fulfill];
+ } localVideoTrackHandler:^{
+ }];
+ // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
+ // crash in Debug.
+ answerer.defaultPeerConnectionConstraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+ optionalConstraints:nil];
+ weakAnswerer = answerer;
+
+ // Kick off connection.
+ [caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
+ [answerer connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
+ [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
+ if (error) {
+ XCTFail(@"Expectation failed with error %@.", error);
+ }
+ }];
+}
+
+// Tests that we get a local video track callback.
+// Note this will currently pass even when no camera is connected, as a local
+// video track is created regardless (perhaps there should be a test for that...).
+#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera support
+- (void)testSessionShouldGetLocalVideoTrackCallback {
+ ARDAppClient *caller = nil;
+ NSString *roomId = @"testRoom";
+ NSString *callerId = @"testCallerId";
+
+ XCTestExpectation *localVideoTrackExpectation =
+ [self expectationWithDescription:@"Caller got local video."];
+
+ caller = [self createAppClientForRoomId:roomId
+ clientId:callerId
+ isInitiator:YES
+ messages:[NSArray array]
+ messageHandler:^(ARDSignalingMessage *message) {}
+ connectedHandler:^{}
+ localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
+ caller.defaultPeerConnectionConstraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+ optionalConstraints:nil];
+
+ // Kick off connection.
+ [caller connectToRoomWithId:roomId
+ settings:[self mockSettingsModel]
+ isLoopback:NO];
+ [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
+ if (error) {
+ XCTFail("Expectation timed out with error: %@.", error);
+ }
+ }];
+}
+#endif
+
+@end
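
The mock helpers above all follow the same OCMock pattern: stub a protocol method with andDo:, pull the completion block out of the NSInvocation, and invoke it synchronously so the client's state machine advances without any real network. A minimal, hypothetical sketch of that pattern (NSInvocation indices 0 and 1 are self and _cmd, so the first declared parameter is index 2):

  id turnMock = [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
  [[[turnMock stub] andDo:^(NSInvocation *invocation) {
    __unsafe_unretained void (^completion)(NSArray *servers, NSError *error);
    [invocation getArgument:&completion atIndex:2];  // first declared parameter
    completion(@[], nil);                            // run the callback immediately
  }] requestServersWithCompletionHandler:[OCMArg any]];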
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm
new file mode 100644
index 0000000000..2e39834190
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#import "ARDFileCaptureController.h"
+
+#import "sdk/objc/components/capturer/RTCFileVideoCapturer.h"
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface ARDFileCaptureControllerTests : XCTestCase
+
+@property(nonatomic, strong) ARDFileCaptureController *fileCaptureController;
+@property(nonatomic, strong) id fileCapturerMock;
+
+@end
+
+@implementation ARDFileCaptureControllerTests
+
+@synthesize fileCaptureController = _fileCaptureController;
+@synthesize fileCapturerMock = _fileCapturerMock;
+
+- (void)setUp {
+ [super setUp];
+ self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
+ self.fileCaptureController =
+ [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock];
+}
+
+- (void)tearDown {
+ self.fileCaptureController = nil;
+ [self.fileCapturerMock stopMocking];
+ self.fileCapturerMock = nil;
+ [super tearDown];
+}
+
+- (void)testCaptureIsStarted {
+ [[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any] onError:[OCMArg any]];
+
+ [self.fileCaptureController startCapture];
+
+ [self.fileCapturerMock verify];
+}
+
+- (void)testCaptureIsStopped {
+ [[self.fileCapturerMock expect] stopCapture];
+
+ [self.fileCaptureController stopCapture];
+
+ [self.fileCapturerMock verify];
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm
new file mode 100644
index 0000000000..dc62798963
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#import "sdk/objc/api/peerconnection/RTCMediaConstraints.h"
+
+#import "ARDSettingsModel+Private.h"
+#import "ARDSettingsStore.h"
+
+
+@interface ARDSettingsModelTests : XCTestCase {
+ ARDSettingsModel *_model;
+}
+@end
+
+@implementation ARDSettingsModelTests
+
+- (id)setupMockStore {
+ id storeMock = [OCMockObject mockForClass:[ARDSettingsStore class]];
+
+ id partialMock = [OCMockObject partialMockForObject:_model];
+ [[[partialMock stub] andReturn:storeMock] settingsStore];
+ [[[partialMock stub] andReturn:@[ @"640x480", @"960x540", @"1280x720" ]]
+ availableVideoResolutions];
+
+ return storeMock;
+}
+
+- (void)setUp {
+ _model = [[ARDSettingsModel alloc] init];
+}
+
+- (void)testRetrievingSetting {
+ id storeMock = [self setupMockStore];
+ [[[storeMock expect] andReturn:@"640x480"] videoResolution];
+ NSString *string = [_model currentVideoResolutionSettingFromStore];
+
+ XCTAssertEqualObjects(string, @"640x480");
+}
+
+- (void)testStoringInvalidConstraintReturnsNo {
+ id storeMock = [self setupMockStore];
+ [([[storeMock stub] andReturn:@"960x480"])videoResolution];
+ XCTAssertFalse([_model storeVideoResolutionSetting:@"960x480"]);
+}
+
+- (void)testWidthConstraintFromStore {
+ id storeMock = [self setupMockStore];
+ [([[storeMock stub] andReturn:@"1270x480"])videoResolution];
+ int width = [_model currentVideoResolutionWidthFromStore];
+
+ XCTAssertEqual(width, 1270);
+}
+
+- (void)testHeightConstraintFromStore {
+ id storeMock = [self setupMockStore];
+ [([[storeMock stub] andReturn:@"960x540"])videoResolution];
+ int height = [_model currentVideoResolutionHeightFromStore];
+
+ XCTAssertEqual(height, 540);
+}
+
+- (void)testConstraintComponentIsNilWhenInvalidConstraintString {
+ id storeMock = [self setupMockStore];
+ [([[storeMock stub] andReturn:@"invalid"])videoResolution];
+ int width = [_model currentVideoResolutionWidthFromStore];
+
+ XCTAssertEqual(width, 0);
+}
+
+- (void)testStoringAudioSetting {
+ id storeMock = [self setupMockStore];
+ [[storeMock expect] setAudioOnly:YES];
+
+ [_model storeAudioOnlySetting:YES];
+ [storeMock verify];
+}
+
+- (void)testReturningDefaultCallOption {
+ id storeMock = [self setupMockStore];
+ [[[storeMock stub] andReturnValue:@YES] useManualAudioConfig];
+
+ XCTAssertTrue([_model currentUseManualAudioConfigSettingFromStore]);
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm
new file mode 100644
index 0000000000..3625ffd7bf
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/tests/main.mm
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#include "test/ios/coverage_util_ios.h"
+
+int main(int argc, char* argv[]) {
+ rtc::test::ConfigureCoverageReportPath();
+
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil, nil);
+ }
+}
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE
new file mode 100644
index 0000000000..c01a79c3bd
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/LICENSE
@@ -0,0 +1,15 @@
+
+ Copyright 2012 Square Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h
new file mode 100644
index 0000000000..a230646073
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h
@@ -0,0 +1,135 @@
+//
+// Copyright 2012 Square Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#import <Foundation/Foundation.h>
+#import <Security/SecCertificate.h>
+
+typedef enum {
+ SR_CONNECTING = 0,
+ SR_OPEN = 1,
+ SR_CLOSING = 2,
+ SR_CLOSED = 3,
+} SRReadyState;
+
+typedef enum SRStatusCode : NSInteger {
+ SRStatusCodeNormal = 1000,
+ SRStatusCodeGoingAway = 1001,
+ SRStatusCodeProtocolError = 1002,
+ SRStatusCodeUnhandledType = 1003,
+ // 1004 reserved.
+ SRStatusNoStatusReceived = 1005,
+ // 1004-1006 reserved.
+ SRStatusCodeInvalidUTF8 = 1007,
+ SRStatusCodePolicyViolated = 1008,
+ SRStatusCodeMessageTooBig = 1009,
+} SRStatusCode;
+
+@class SRWebSocket;
+
+extern NSString *const SRWebSocketErrorDomain;
+extern NSString *const SRHTTPResponseErrorKey;
+
+#pragma mark - SRWebSocketDelegate
+
+@protocol SRWebSocketDelegate;
+
+#pragma mark - SRWebSocket
+
+@interface SRWebSocket : NSObject <NSStreamDelegate>
+
+@property(nonatomic, weak) id<SRWebSocketDelegate> delegate;
+
+@property(nonatomic, readonly) SRReadyState readyState;
+@property(nonatomic, readonly, retain) NSURL *url;
+
+// This returns the negotiated protocol.
+// It will be nil until after the handshake completes.
+@property(nonatomic, readonly, copy) NSString *protocol;
+
+// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol.
+- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
+- (id)initWithURLRequest:(NSURLRequest *)request;
+
+// Some helper constructors.
+- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
+- (id)initWithURL:(NSURL *)url;
+
+// Delegate queue will be dispatch_main_queue by default.
+// You cannot set both OperationQueue and dispatch_queue.
+- (void)setDelegateOperationQueue:(NSOperationQueue *)queue;
+- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
+
+// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes.
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+
+// SRWebSockets are intended for one-time-use only. Open should be called once and only once.
+- (void)open;
+
+- (void)close;
+- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;
+
+// Send a UTF8 String or Data.
+- (void)send:(id)data;
+
+// Send Data (can be nil) in a ping message.
+- (void)sendPing:(NSData *)data;
+
+@end
+
+#pragma mark - SRWebSocketDelegate
+
+@protocol SRWebSocketDelegate <NSObject>
+
+// message will either be an NSString if the server is using text
+// or NSData if the server is using binary.
+- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message;
+
+@optional
+
+- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
+- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error;
+- (void)webSocket:(SRWebSocket *)webSocket
+ didCloseWithCode:(NSInteger)code
+ reason:(NSString *)reason
+ wasClean:(BOOL)wasClean;
+- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;
+
+@end
+
+#pragma mark - NSURLRequest (CertificateAdditions)
+
+@interface NSURLRequest (CertificateAdditions)
+
+@property(nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates;
+
+@end
+
+#pragma mark - NSMutableURLRequest (CertificateAdditions)
+
+@interface NSMutableURLRequest (CertificateAdditions)
+
+@property(nonatomic, retain) NSArray *SR_SSLPinnedCertificates;
+
+@end
+
+#pragma mark - NSRunLoop (SRWebSocket)
+
+@interface NSRunLoop (SRWebSocket)
+
++ (NSRunLoop *)SR_networkRunLoop;
+
+@end
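
Based only on the SRWebSocket header above, a hypothetical caller creates a socket, assigns a delegate, opens it exactly once, and handles messages on the delegate queue (the main queue by default). A minimal sketch, not part of the patch; the class and URL below are illustrative:

@interface MyEchoClient : NSObject <SRWebSocketDelegate>
@property(nonatomic, strong) SRWebSocket *socket;
@end

@implementation MyEchoClient
- (void)connect {
  self.socket = [[SRWebSocket alloc] initWithURL:[NSURL URLWithString:@"wss://example.com/ws"]];
  self.socket.delegate = self;
  [self.socket open];  // one-time use: call open only once per instance
}
- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
  [webSocket send:@"hello"];  // NSString for text frames, NSData for binary
}
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
  NSLog(@"received: %@", message);
  [webSocket closeWithCode:SRStatusCodeNormal reason:nil];
}
@end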
diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
new file mode 100644
index 0000000000..ab0d1b89bc
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
@@ -0,0 +1,1774 @@
+//
+// Copyright 2012 Square Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+
+#import "SRWebSocket.h"
+
+#if TARGET_OS_IPHONE
+#define HAS_ICU
+#endif
+
+#ifdef HAS_ICU
+#import <unicode/utf8.h>
+#endif
+
+#if TARGET_OS_IPHONE
+#import <Endian.h>
+#else
+#import <CoreServices/CoreServices.h>
+#endif
+
+#import <CommonCrypto/CommonDigest.h>
+#import <Security/SecRandom.h>
+
+#if OS_OBJECT_USE_OBJC_RETAIN_RELEASE
+#define sr_dispatch_retain(x)
+#define sr_dispatch_release(x)
+#define maybe_bridge(x) ((__bridge void *) x)
+#else
+#define sr_dispatch_retain(x) dispatch_retain(x)
+#define sr_dispatch_release(x) dispatch_release(x)
+#define maybe_bridge(x) (x)
+#endif
+
+#if !__has_feature(objc_arc)
+#error SocketRocket must be compiled with ARC enabled
+#endif
+
+
+typedef enum {
+ SROpCodeTextFrame = 0x1,
+ SROpCodeBinaryFrame = 0x2,
+ // 3-7 reserved.
+ SROpCodeConnectionClose = 0x8,
+ SROpCodePing = 0x9,
+ SROpCodePong = 0xA,
+ // B-F reserved.
+} SROpCode;
+
+typedef struct {
+ BOOL fin;
+// BOOL rsv1;
+// BOOL rsv2;
+// BOOL rsv3;
+ uint8_t opcode;
+ BOOL masked;
+ uint64_t payload_length;
+} frame_header;
+
+static NSString *const SRWebSocketAppendToSecKeyString = @"258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
+
+static inline int32_t validate_dispatch_data_partial_string(NSData *data);
+static inline void SRFastLog(NSString *format, ...);
+
+@interface NSData (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+
+@end
+
+
+@interface NSString (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+
+@end
+
+
+@interface NSURL (SRWebSocket)
+
+// The origin isn't really applicable for a native application.
+// So instead, just map ws -> http and wss -> https.
+- (NSString *)SR_origin;
+
+@end
+
+
+@interface _SRRunLoopThread : NSThread
+
+@property (nonatomic, readonly) NSRunLoop *runLoop;
+
+@end
+
+
+static NSString *newSHA1String(const char *bytes, size_t length) {
+ uint8_t md[CC_SHA1_DIGEST_LENGTH];
+
+ assert(length >= 0);
+ assert(length <= UINT32_MAX);
+ CC_SHA1(bytes, (CC_LONG)length, md);
+
+ NSData *data = [NSData dataWithBytes:md length:CC_SHA1_DIGEST_LENGTH];
+
+ if ([data respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
+ return [data base64EncodedStringWithOptions:0];
+ }
+
+ return [data base64Encoding];
+}
+
+@implementation NSData (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+{
+ return newSHA1String(self.bytes, self.length);
+}
+
+@end
+
+
+@implementation NSString (SRWebSocket)
+
+- (NSString *)stringBySHA1ThenBase64Encoding;
+{
+ return newSHA1String(self.UTF8String, self.length);
+}
+
+@end
+
+NSString *const SRWebSocketErrorDomain = @"SRWebSocketErrorDomain";
+NSString *const SRHTTPResponseErrorKey = @"HTTPResponseStatusCode";
+
+// Returns number of bytes consumed. Returning 0 means you didn't match.
+// Sends bytes to the callback handler.
+typedef size_t (^stream_scanner)(NSData *collected_data);
+
+typedef void (^data_callback)(SRWebSocket *webSocket, NSData *data);
+
+@interface SRIOConsumer : NSObject {
+ stream_scanner _scanner;
+ data_callback _handler;
+ size_t _bytesNeeded;
+ BOOL _readToCurrentFrame;
+ BOOL _unmaskBytes;
+}
+@property (nonatomic, copy, readonly) stream_scanner consumer;
+@property (nonatomic, copy, readonly) data_callback handler;
+@property (nonatomic, assign) size_t bytesNeeded;
+@property (nonatomic, assign, readonly) BOOL readToCurrentFrame;
+@property (nonatomic, assign, readonly) BOOL unmaskBytes;
+
+@end
+
+// This class is not thread-safe, and is expected to always be run on the same queue.
+@interface SRIOConsumerPool : NSObject
+
+- (id)initWithBufferCapacity:(NSUInteger)poolSize;
+
+- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+- (void)returnConsumer:(SRIOConsumer *)consumer;
+
+@end
+
+@interface SRWebSocket () <NSStreamDelegate>
+
+- (void)_writeData:(NSData *)data;
+- (void)_closeWithProtocolError:(NSString *)message;
+- (void)_failWithError:(NSError *)error;
+
+- (void)_disconnect;
+
+- (void)_readFrameNew;
+- (void)_readFrameContinue;
+
+- (void)_pumpScanner;
+
+- (void)_pumpWriting;
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback;
+- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback dataLength:(size_t)dataLength;
+- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler;
+- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler;
+
+- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data;
+
+- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage;
+- (void)_SR_commonInit;
+
+- (void)_initializeStreams;
+- (void)_connect;
+
+@property (nonatomic) SRReadyState readyState;
+
+@property (nonatomic) NSOperationQueue *delegateOperationQueue;
+@property (nonatomic) dispatch_queue_t delegateDispatchQueue;
+
+@end
+
+
+@implementation SRWebSocket {
+ NSInteger _webSocketVersion;
+
+ NSOperationQueue *_delegateOperationQueue;
+ dispatch_queue_t _delegateDispatchQueue;
+
+ dispatch_queue_t _workQueue;
+ NSMutableArray *_consumers;
+
+ NSInputStream *_inputStream;
+ NSOutputStream *_outputStream;
+
+ NSMutableData *_readBuffer;
+ NSUInteger _readBufferOffset;
+
+ NSMutableData *_outputBuffer;
+ NSUInteger _outputBufferOffset;
+
+ uint8_t _currentFrameOpcode;
+ size_t _currentFrameCount;
+ size_t _readOpCount;
+ uint32_t _currentStringScanPosition;
+ NSMutableData *_currentFrameData;
+
+ NSString *_closeReason;
+
+ NSString *_secKey;
+
+ BOOL _pinnedCertFound;
+
+ uint8_t _currentReadMaskKey[4];
+ size_t _currentReadMaskOffset;
+
+ BOOL _consumerStopped;
+
+ BOOL _closeWhenFinishedWriting;
+ BOOL _failed;
+
+ BOOL _secure;
+ NSURLRequest *_urlRequest;
+
+ CFHTTPMessageRef _receivedHTTPHeaders;
+
+ BOOL _sentClose;
+ BOOL _didFail;
+ int _closeCode;
+
+ BOOL _isPumping;
+
+ NSMutableSet *_scheduledRunloops;
+
+ // We use this to retain ourselves.
+ __strong SRWebSocket *_selfRetain;
+
+ NSArray *_requestedProtocols;
+ SRIOConsumerPool *_consumerPool;
+}
+
+@synthesize delegate = _delegate;
+@synthesize url = _url;
+@synthesize readyState = _readyState;
+@synthesize protocol = _protocol;
+
+static __strong NSData *CRLFCRLF;
+
++ (void)initialize;
+{
+ CRLFCRLF = [[NSData alloc] initWithBytes:"\r\n\r\n" length:4];
+}
+
+- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
+{
+ self = [super init];
+ if (self) {
+ assert(request.URL);
+ _url = request.URL;
+ _urlRequest = request;
+
+ _requestedProtocols = [protocols copy];
+
+ [self _SR_commonInit];
+ }
+
+ return self;
+}
+
+- (id)initWithURLRequest:(NSURLRequest *)request;
+{
+ return [self initWithURLRequest:request protocols:nil];
+}
+
+- (id)initWithURL:(NSURL *)url;
+{
+ return [self initWithURL:url protocols:nil];
+}
+
+- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
+{
+ NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:url];
+ return [self initWithURLRequest:request protocols:protocols];
+}
+
+- (void)_SR_commonInit;
+{
+
+ NSString *scheme = _url.scheme.lowercaseString;
+ assert([scheme isEqualToString:@"ws"] || [scheme isEqualToString:@"http"] || [scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]);
+
+ if ([scheme isEqualToString:@"wss"] || [scheme isEqualToString:@"https"]) {
+ _secure = YES;
+ }
+
+ _readyState = SR_CONNECTING;
+ _consumerStopped = YES;
+ _webSocketVersion = 13;
+
+ _workQueue = dispatch_queue_create(NULL, DISPATCH_QUEUE_SERIAL);
+
+ // Set a queue-specific value so we can later validate that we're on the work queue.
+ dispatch_queue_set_specific(_workQueue, (__bridge void *)self, maybe_bridge(_workQueue), NULL);
+
+ _delegateDispatchQueue = dispatch_get_main_queue();
+ sr_dispatch_retain(_delegateDispatchQueue);
+
+ _readBuffer = [[NSMutableData alloc] init];
+ _outputBuffer = [[NSMutableData alloc] init];
+
+ _currentFrameData = [[NSMutableData alloc] init];
+
+ _consumers = [[NSMutableArray alloc] init];
+
+ _consumerPool = [[SRIOConsumerPool alloc] init];
+
+ _scheduledRunloops = [[NSMutableSet alloc] init];
+
+ [self _initializeStreams];
+
+ // default handlers
+}
+
+- (void)assertOnWorkQueue;
+{
+ assert(dispatch_get_specific((__bridge void *)self) == maybe_bridge(_workQueue));
+}
+
+- (void)dealloc
+{
+ _inputStream.delegate = nil;
+ _outputStream.delegate = nil;
+
+ [_inputStream close];
+ [_outputStream close];
+
+ sr_dispatch_release(_workQueue);
+ _workQueue = NULL;
+
+ if (_receivedHTTPHeaders) {
+ CFRelease(_receivedHTTPHeaders);
+ _receivedHTTPHeaders = NULL;
+ }
+
+ if (_delegateDispatchQueue) {
+ sr_dispatch_release(_delegateDispatchQueue);
+ _delegateDispatchQueue = NULL;
+ }
+}
+
+#ifndef NDEBUG
+
+- (void)setReadyState:(SRReadyState)aReadyState;
+{
+ [self willChangeValueForKey:@"readyState"];
+ assert(aReadyState > _readyState);
+ _readyState = aReadyState;
+ [self didChangeValueForKey:@"readyState"];
+}
+
+#endif
+
+- (void)open;
+{
+ assert(_url);
+ NSAssert(_readyState == SR_CONNECTING, @"Cannot call -(void)open on SRWebSocket more than once");
+
+ _selfRetain = self;
+
+ [self _connect];
+}
+
+// Calls block on delegate queue
+- (void)_performDelegateBlock:(dispatch_block_t)block;
+{
+ if (_delegateOperationQueue) {
+ [_delegateOperationQueue addOperationWithBlock:block];
+ } else {
+ assert(_delegateDispatchQueue);
+ dispatch_async(_delegateDispatchQueue, block);
+ }
+}
+
+- (void)setDelegateDispatchQueue:(dispatch_queue_t)queue;
+{
+ if (queue) {
+ sr_dispatch_retain(queue);
+ }
+
+ if (_delegateDispatchQueue) {
+ sr_dispatch_release(_delegateDispatchQueue);
+ }
+
+ _delegateDispatchQueue = queue;
+}
+
+- (BOOL)_checkHandshake:(CFHTTPMessageRef)httpMessage;
+{
+ NSString *acceptHeader = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(httpMessage, CFSTR("Sec-WebSocket-Accept")));
+
+ if (acceptHeader == nil) {
+ return NO;
+ }
+
+ NSString *concattedString = [_secKey stringByAppendingString:SRWebSocketAppendToSecKeyString];
+ NSString *expectedAccept = [concattedString stringBySHA1ThenBase64Encoding];
+
+ return [acceptHeader isEqualToString:expectedAccept];
+}
+
+- (void)_HTTPHeadersDidFinish;
+{
+ NSInteger responseCode = CFHTTPMessageGetResponseStatusCode(_receivedHTTPHeaders);
+
+ if (responseCode >= 400) {
+ SRFastLog(@"Request failed with response code %d", responseCode);
+ [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2132 userInfo:@{NSLocalizedDescriptionKey:[NSString stringWithFormat:@"received bad response code from server %ld", (long)responseCode], SRHTTPResponseErrorKey:@(responseCode)}]];
+ return;
+ }
+
+ if(![self _checkHandshake:_receivedHTTPHeaders]) {
+ [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Invalid Sec-WebSocket-Accept response"] forKey:NSLocalizedDescriptionKey]]];
+ return;
+ }
+
+ NSString *negotiatedProtocol = CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_receivedHTTPHeaders, CFSTR("Sec-WebSocket-Protocol")));
+ if (negotiatedProtocol) {
+ // Make sure we requested the protocol
+ if ([_requestedProtocols indexOfObject:negotiatedProtocol] == NSNotFound) {
+ [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2133 userInfo:[NSDictionary dictionaryWithObject:[NSString stringWithFormat:@"Server specified Sec-WebSocket-Protocol that wasn't requested"] forKey:NSLocalizedDescriptionKey]]];
+ return;
+ }
+
+ _protocol = negotiatedProtocol;
+ }
+
+ self.readyState = SR_OPEN;
+
+ if (!_didFail) {
+ [self _readFrameNew];
+ }
+
+ [self _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocketDidOpen:)]) {
+ [self.delegate webSocketDidOpen:self];
+ };
+ }];
+}
+
+
+- (void)_readHTTPHeader;
+{
+ if (_receivedHTTPHeaders == NULL) {
+ _receivedHTTPHeaders = CFHTTPMessageCreateEmpty(NULL, NO);
+ }
+
+ [self _readUntilHeaderCompleteWithCallback:^(SRWebSocket *self, NSData *data) {
+ CFHTTPMessageAppendBytes(self->_receivedHTTPHeaders, (const UInt8 *)data.bytes, data.length);
+
+ if (CFHTTPMessageIsHeaderComplete(self->_receivedHTTPHeaders)) {
+ SRFastLog(@"Finished reading headers %@",
+ CFBridgingRelease(CFHTTPMessageCopyAllHeaderFields(self->_receivedHTTPHeaders)));
+ [self _HTTPHeadersDidFinish];
+ } else {
+ [self _readHTTPHeader];
+ }
+ }];
+}
+
+- (void)didConnect
+{
+ SRFastLog(@"Connected");
+ CFHTTPMessageRef request = CFHTTPMessageCreateRequest(NULL, CFSTR("GET"), (__bridge CFURLRef)_url, kCFHTTPVersion1_1);
+
+ // Set host first so it defaults
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Host"), (__bridge CFStringRef)(_url.port ? [NSString stringWithFormat:@"%@:%@", _url.host, _url.port] : _url.host));
+
+ NSMutableData *keyBytes = [[NSMutableData alloc] initWithLength:16];
+ BOOL success = !SecRandomCopyBytes(kSecRandomDefault, keyBytes.length, keyBytes.mutableBytes);
+ assert(success);
+
+ if ([keyBytes respondsToSelector:@selector(base64EncodedStringWithOptions:)]) {
+ _secKey = [keyBytes base64EncodedStringWithOptions:0];
+ } else {
+ _secKey = [keyBytes base64Encoding];
+ }
+
+ assert([_secKey length] == 24);
+
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Upgrade"), CFSTR("websocket"));
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Connection"), CFSTR("Upgrade"));
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Key"), (__bridge CFStringRef)_secKey);
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Version"), (__bridge CFStringRef)[NSString stringWithFormat:@"%ld", (long)_webSocketVersion]);
+
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Origin"), (__bridge CFStringRef)_url.SR_origin);
+
+ if (_requestedProtocols) {
+ CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Protocol"), (__bridge CFStringRef)[_requestedProtocols componentsJoinedByString:@", "]);
+ }
+
+ [_urlRequest.allHTTPHeaderFields enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) {
+ CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj);
+ }];
+
+ NSData *message = CFBridgingRelease(CFHTTPMessageCopySerializedMessage(request));
+
+ CFRelease(request);
+
+ [self _writeData:message];
+ [self _readHTTPHeader];
+}
+
+- (void)_initializeStreams;
+{
+ assert(_url.port.unsignedIntValue <= UINT32_MAX);
+ uint32_t port = _url.port.unsignedIntValue;
+ if (port == 0) {
+ if (!_secure) {
+ port = 80;
+ } else {
+ port = 443;
+ }
+ }
+ NSString *host = _url.host;
+
+ CFReadStreamRef readStream = NULL;
+ CFWriteStreamRef writeStream = NULL;
+
+ CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)host, port, &readStream, &writeStream);
+
+ _outputStream = CFBridgingRelease(writeStream);
+ _inputStream = CFBridgingRelease(readStream);
+
+
+ if (_secure) {
+ NSMutableDictionary *SSLOptions = [[NSMutableDictionary alloc] init];
+
+ [_outputStream setProperty:(__bridge id)kCFStreamSocketSecurityLevelNegotiatedSSL forKey:(__bridge id)kCFStreamPropertySocketSecurityLevel];
+
+ // If we're using pinned certs, don't validate the certificate chain
+ if ([_urlRequest SR_SSLPinnedCertificates].count) {
+ [SSLOptions setValue:[NSNumber numberWithBool:NO] forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
+ }
+
+#ifdef DEBUG
+ [SSLOptions setValue:[NSNumber numberWithBool:NO] forKey:(__bridge id)kCFStreamSSLValidatesCertificateChain];
+ NSLog(@"SocketRocket: In debug mode. Allowing connection to any root cert");
+#endif
+
+ [_outputStream setProperty:SSLOptions
+ forKey:(__bridge id)kCFStreamPropertySSLSettings];
+ }
+
+ _inputStream.delegate = self;
+ _outputStream.delegate = self;
+}
+
+- (void)_connect;
+{
+ if (!_scheduledRunloops.count) {
+ [self scheduleInRunLoop:[NSRunLoop SR_networkRunLoop] forMode:NSDefaultRunLoopMode];
+ }
+
+
+ [_outputStream open];
+ [_inputStream open];
+}
+
+- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+{
+ [_outputStream scheduleInRunLoop:aRunLoop forMode:mode];
+ [_inputStream scheduleInRunLoop:aRunLoop forMode:mode];
+
+ [_scheduledRunloops addObject:@[aRunLoop, mode]];
+}
+
+- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
+{
+ [_outputStream removeFromRunLoop:aRunLoop forMode:mode];
+ [_inputStream removeFromRunLoop:aRunLoop forMode:mode];
+
+ [_scheduledRunloops removeObject:@[aRunLoop, mode]];
+}
+
+- (void)close;
+{
+ [self closeWithCode:SRStatusCodeNormal reason:nil];
+}
+
+- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;
+{
+ assert(code);
+ dispatch_async(_workQueue, ^{
+ if (self.readyState == SR_CLOSING || self.readyState == SR_CLOSED) {
+ return;
+ }
+
+ BOOL wasConnecting = self.readyState == SR_CONNECTING;
+
+ self.readyState = SR_CLOSING;
+
+ SRFastLog(@"Closing with code %d reason %@", code, reason);
+
+ if (wasConnecting) {
+ [self _disconnect];
+ return;
+ }
+
+ size_t maxMsgSize = [reason maximumLengthOfBytesUsingEncoding:NSUTF8StringEncoding];
+ NSMutableData *mutablePayload = [[NSMutableData alloc] initWithLength:sizeof(uint16_t) + maxMsgSize];
+ NSData *payload = mutablePayload;
+
+ ((uint16_t *)mutablePayload.mutableBytes)[0] = EndianU16_BtoN(code);
+
+ if (reason) {
+ NSRange remainingRange = {0};
+
+ NSUInteger usedLength = 0;
+
+ BOOL success = [reason getBytes:(char *)mutablePayload.mutableBytes + sizeof(uint16_t) maxLength:payload.length - sizeof(uint16_t) usedLength:&usedLength encoding:NSUTF8StringEncoding options:NSStringEncodingConversionExternalRepresentation range:NSMakeRange(0, reason.length) remainingRange:&remainingRange];
+
+ assert(success);
+ assert(remainingRange.length == 0);
+
+ if (usedLength != maxMsgSize) {
+ payload = [payload subdataWithRange:NSMakeRange(0, usedLength + sizeof(uint16_t))];
+ }
+ }
+
+
+ [self _sendFrameWithOpcode:SROpCodeConnectionClose data:payload];
+ });
+}
+
+- (void)_closeWithProtocolError:(NSString *)message;
+{
+ // Need to shunt this on the _callbackQueue first to see if they received any messages
+ [self _performDelegateBlock:^{
+ [self closeWithCode:SRStatusCodeProtocolError reason:message];
+ dispatch_async(self->_workQueue, ^{
+ [self _disconnect];
+ });
+ }];
+}
+
+- (void)_failWithError:(NSError *)error;
+{
+ dispatch_async(_workQueue, ^{
+ if (self.readyState != SR_CLOSED) {
+ self->_failed = YES;
+ [self _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocket:didFailWithError:)]) {
+ [self.delegate webSocket:self didFailWithError:error];
+ }
+ }];
+
+ self.readyState = SR_CLOSED;
+ self->_selfRetain = nil;
+
+ SRFastLog(@"Failing with error %@", error.localizedDescription);
+
+ [self _disconnect];
+ }
+ });
+}
+
+- (void)_writeData:(NSData *)data;
+{
+ [self assertOnWorkQueue];
+
+ if (_closeWhenFinishedWriting) {
+ return;
+ }
+ [_outputBuffer appendData:data];
+ [self _pumpWriting];
+}
+
+- (void)send:(id)data;
+{
+ NSAssert(self.readyState != SR_CONNECTING, @"Invalid State: Cannot call send: until connection is open");
+ // TODO: maybe not copy this for performance
+ data = [data copy];
+ dispatch_async(_workQueue, ^{
+ if ([data isKindOfClass:[NSString class]]) {
+ [self _sendFrameWithOpcode:SROpCodeTextFrame data:[(NSString *)data dataUsingEncoding:NSUTF8StringEncoding]];
+ } else if ([data isKindOfClass:[NSData class]]) {
+ [self _sendFrameWithOpcode:SROpCodeBinaryFrame data:data];
+ } else if (data == nil) {
+ [self _sendFrameWithOpcode:SROpCodeTextFrame data:data];
+ } else {
+ assert(NO);
+ }
+ });
+}
+
+- (void)sendPing:(NSData *)data;
+{
+ NSAssert(self.readyState == SR_OPEN, @"Invalid State: Cannot call send: until connection is open");
+ // TODO: maybe not copy this for performance
+ data = [data copy] ?: [NSData data]; // It's okay for a ping to be empty
+ dispatch_async(_workQueue, ^{
+ [self _sendFrameWithOpcode:SROpCodePing data:data];
+ });
+}
+
+- (void)handlePing:(NSData *)pingData;
+{
+ // Need to pingpong this off _callbackQueue first to make sure messages happen in order
+ [self _performDelegateBlock:^{
+ dispatch_async(self->_workQueue, ^{
+ [self _sendFrameWithOpcode:SROpCodePong data:pingData];
+ });
+ }];
+}
+
+- (void)handlePong:(NSData *)pongData;
+{
+ SRFastLog(@"Received pong");
+ [self _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocket:didReceivePong:)]) {
+ [self.delegate webSocket:self didReceivePong:pongData];
+ }
+ }];
+}
+
+- (void)_handleMessage:(id)message
+{
+ SRFastLog(@"Received message");
+ [self _performDelegateBlock:^{
+ [self.delegate webSocket:self didReceiveMessage:message];
+ }];
+}
+
+
+static inline BOOL closeCodeIsValid(int closeCode) {
+ if (closeCode < 1000) {
+ return NO;
+ }
+
+ if (closeCode >= 1000 && closeCode <= 1011) {
+ if (closeCode == 1004 ||
+ closeCode == 1005 ||
+ closeCode == 1006) {
+ return NO;
+ }
+ return YES;
+ }
+
+ if (closeCode >= 3000 && closeCode <= 3999) {
+ return YES;
+ }
+
+ if (closeCode >= 4000 && closeCode <= 4999) {
+ return YES;
+ }
+
+ return NO;
+}
+
+// Note from RFC:
+//
+// If there is a body, the first two
+// bytes of the body MUST be a 2-byte unsigned integer (in network byte
+// order) representing a status code with value /code/ defined in
+// Section 7.4. Following the 2-byte integer the body MAY contain UTF-8
+// encoded data with value /reason/, the interpretation of which is not
+// defined by this specification.
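+//
+// A minimal sketch of that layout (illustrative only, not code used by this
+// file): the status code goes first in network byte order, followed by an
+// optional UTF-8 reason.
+//
+//   uint16_t code = EndianU16_NtoB(1000);  // 1000 = normal closure
+//   NSMutableData *body = [NSMutableData dataWithBytes:&code length:sizeof(code)];
+//   [body appendData:[@"going away" dataUsingEncoding:NSUTF8StringEncoding]];
+//
+// handleCloseWithData: below parses exactly this layout in the other direction.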
+
+- (void)handleCloseWithData:(NSData *)data;
+{
+ size_t dataSize = data.length;
+ __block uint16_t closeCode = 0;
+
+ SRFastLog(@"Received close frame");
+
+ if (dataSize == 1) {
+ // TODO handle error
+ [self _closeWithProtocolError:@"Payload for close must be larger than 2 bytes"];
+ return;
+ } else if (dataSize >= 2) {
+ [data getBytes:&closeCode length:sizeof(closeCode)];
+ _closeCode = EndianU16_BtoN(closeCode);
+ if (!closeCodeIsValid(_closeCode)) {
+ [self _closeWithProtocolError:[NSString stringWithFormat:@"Cannot have close code of %d", _closeCode]];
+ return;
+ }
+ if (dataSize > 2) {
+ _closeReason = [[NSString alloc] initWithData:[data subdataWithRange:NSMakeRange(2, dataSize - 2)] encoding:NSUTF8StringEncoding];
+ if (!_closeReason) {
+ [self _closeWithProtocolError:@"Close reason MUST be valid UTF-8"];
+ return;
+ }
+ }
+ } else {
+ _closeCode = SRStatusNoStatusReceived;
+ }
+
+ [self assertOnWorkQueue];
+
+ if (self.readyState == SR_OPEN) {
+ [self closeWithCode:1000 reason:nil];
+ }
+ dispatch_async(_workQueue, ^{
+ [self _disconnect];
+ });
+}
+
+- (void)_disconnect;
+{
+ [self assertOnWorkQueue];
+ SRFastLog(@"Trying to disconnect");
+ _closeWhenFinishedWriting = YES;
+ [self _pumpWriting];
+}
+
+- (void)_handleFrameWithData:(NSData *)frameData opCode:(NSInteger)opcode;
+{
+ // Check that the current data is valid UTF8
+
+ BOOL isControlFrame = (opcode == SROpCodePing || opcode == SROpCodePong || opcode == SROpCodeConnectionClose);
+ if (!isControlFrame) {
+ [self _readFrameNew];
+ } else {
+ dispatch_async(_workQueue, ^{
+ [self _readFrameContinue];
+ });
+ }
+
+ switch (opcode) {
+ case SROpCodeTextFrame: {
+ NSString *str = [[NSString alloc] initWithData:frameData encoding:NSUTF8StringEncoding];
+ if (str == nil && frameData) {
+ [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
+ dispatch_async(_workQueue, ^{
+ [self _disconnect];
+ });
+
+ return;
+ }
+ [self _handleMessage:str];
+ break;
+ }
+ case SROpCodeBinaryFrame:
+ [self _handleMessage:[frameData copy]];
+ break;
+ case SROpCodeConnectionClose:
+ [self handleCloseWithData:frameData];
+ break;
+ case SROpCodePing:
+ [self handlePing:frameData];
+ break;
+ case SROpCodePong:
+ [self handlePong:frameData];
+ break;
+ default:
+ [self _closeWithProtocolError:[NSString stringWithFormat:@"Unknown opcode %ld", (long)opcode]];
+ // TODO: Handle invalid opcode
+ break;
+ }
+}
+
+- (void)_handleFrameHeader:(frame_header)frame_header curData:(NSData *)curData;
+{
+ assert(frame_header.opcode != 0);
+
+ if (self.readyState != SR_OPEN) {
+ return;
+ }
+
+
+ BOOL isControlFrame = (frame_header.opcode == SROpCodePing || frame_header.opcode == SROpCodePong || frame_header.opcode == SROpCodeConnectionClose);
+
+ if (isControlFrame && !frame_header.fin) {
+ [self _closeWithProtocolError:@"Fragmented control frames not allowed"];
+ return;
+ }
+
+ if (isControlFrame && frame_header.payload_length >= 126) {
+ [self _closeWithProtocolError:@"Control frames cannot have payloads larger than 126 bytes"];
+ return;
+ }
+
+ if (!isControlFrame) {
+ _currentFrameOpcode = frame_header.opcode;
+ _currentFrameCount += 1;
+ }
+
+ if (frame_header.payload_length == 0) {
+ if (isControlFrame) {
+ [self _handleFrameWithData:curData opCode:frame_header.opcode];
+ } else {
+ if (frame_header.fin) {
+ [self _handleFrameWithData:_currentFrameData opCode:frame_header.opcode];
+ } else {
+ // TODO add assert that opcode is not a control;
+ [self _readFrameContinue];
+ }
+ }
+ } else {
+ assert(frame_header.payload_length <= SIZE_T_MAX);
+ [self _addConsumerWithDataLength:(size_t)frame_header.payload_length callback:^(SRWebSocket *self, NSData *newData) {
+ if (isControlFrame) {
+ [self _handleFrameWithData:newData opCode:frame_header.opcode];
+ } else {
+ if (frame_header.fin) {
+ [self _handleFrameWithData:self->_currentFrameData opCode:frame_header.opcode];
+ } else {
+ // TODO add assert that opcode is not a control;
+ [self _readFrameContinue];
+ }
+
+ }
+ } readToCurrentFrame:!isControlFrame unmaskBytes:frame_header.masked];
+ }
+}
+
+/* From RFC:
+
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-------+-+-------------+-------------------------------+
+ |F|R|R|R| opcode|M| Payload len | Extended payload length |
+ |I|S|S|S| (4) |A| (7) | (16/64) |
+ |N|V|V|V| |S| | (if payload len==126/127) |
+ | |1|2|3| |K| | |
+ +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
+ | Extended payload length continued, if payload len == 127 |
+ + - - - - - - - - - - - - - - - +-------------------------------+
+ | |Masking-key, if MASK set to 1 |
+ +-------------------------------+-------------------------------+
+ | Masking-key (continued) | Payload Data |
+ +-------------------------------- - - - - - - - - - - - - - - - +
+ : Payload Data continued ... :
+ + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
+ | Payload Data continued ... |
+ +---------------------------------------------------------------+
+ */
+
+static const uint8_t SRFinMask = 0x80;
+static const uint8_t SROpCodeMask = 0x0F;
+static const uint8_t SRRsvMask = 0x70;
+static const uint8_t SRMaskMask = 0x80;
+static const uint8_t SRPayloadLenMask = 0x7F;
+
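+// A minimal sketch of how the masks above pick apart the first two header
+// bytes from the diagram (illustrative only; _readFrameContinue below is the
+// real parser).  For an unmasked, final text frame with a 5-byte payload the
+// header bytes are 0x81 0x05:
+//
+//   uint8_t b0 = 0x81, b1 = 0x05;
+//   BOOL fin = !!(b0 & SRFinMask);                   // 0x80 -> final fragment
+//   uint8_t opcode = b0 & SROpCodeMask;              // 0x01 -> SROpCodeTextFrame
+//   BOOL masked = !!(b1 & SRMaskMask);               // 0 -> unmasked, as servers must send
+//   uint64_t payload_len = b1 & SRPayloadLenMask;    // 5 -> no extended length bytes follow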
+
+- (void)_readFrameContinue;
+{
+ assert((_currentFrameCount == 0 && _currentFrameOpcode == 0) || (_currentFrameCount > 0 && _currentFrameOpcode > 0));
+
+ [self _addConsumerWithDataLength:2 callback:^(SRWebSocket *self, NSData *data) {
+ __block frame_header header = {0};
+
+ const uint8_t *headerBuffer = data.bytes;
+ assert(data.length >= 2);
+
+ if (headerBuffer[0] & SRRsvMask) {
+ [self _closeWithProtocolError:@"Server used RSV bits"];
+ return;
+ }
+
+ uint8_t receivedOpcode = (SROpCodeMask & headerBuffer[0]);
+
+ BOOL isControlFrame = (receivedOpcode == SROpCodePing || receivedOpcode == SROpCodePong || receivedOpcode == SROpCodeConnectionClose);
+
+ if (!isControlFrame && receivedOpcode != 0 && self->_currentFrameCount > 0) {
+ [self _closeWithProtocolError:@"all data frames after the initial data frame must have opcode 0"];
+ return;
+ }
+
+ if (receivedOpcode == 0 && self->_currentFrameCount == 0) {
+ [self _closeWithProtocolError:@"cannot continue a message"];
+ return;
+ }
+
+ header.opcode = receivedOpcode == 0 ? self->_currentFrameOpcode : receivedOpcode;
+
+ header.fin = !!(SRFinMask & headerBuffer[0]);
+
+
+ header.masked = !!(SRMaskMask & headerBuffer[1]);
+ header.payload_length = SRPayloadLenMask & headerBuffer[1];
+
+ headerBuffer = NULL;
+
+ if (header.masked) {
+ [self _closeWithProtocolError:@"Client must receive unmasked data"];
+ }
+
+ size_t extra_bytes_needed = header.masked ? sizeof(self->_currentReadMaskKey) : 0;
+
+ if (header.payload_length == 126) {
+ extra_bytes_needed += sizeof(uint16_t);
+ } else if (header.payload_length == 127) {
+ extra_bytes_needed += sizeof(uint64_t);
+ }
+
+ if (extra_bytes_needed == 0) {
+ [self _handleFrameHeader:header curData:self->_currentFrameData];
+ } else {
+ [self _addConsumerWithDataLength:extra_bytes_needed callback:^(SRWebSocket *self, NSData *data) {
+ size_t mapped_size = data.length;
+ const void *mapped_buffer = data.bytes;
+ size_t offset = 0;
+
+ if (header.payload_length == 126) {
+ assert(mapped_size >= sizeof(uint16_t));
+ uint16_t newLen = EndianU16_BtoN(*(uint16_t *)(mapped_buffer));
+ header.payload_length = newLen;
+ offset += sizeof(uint16_t);
+ } else if (header.payload_length == 127) {
+ assert(mapped_size >= sizeof(uint64_t));
+ header.payload_length = EndianU64_BtoN(*(uint64_t *)(mapped_buffer));
+ offset += sizeof(uint64_t);
+ } else {
+ assert(header.payload_length < 126 && header.payload_length >= 0);
+ }
+
+
+ if (header.masked) {
+          assert(mapped_size >= sizeof(self->_currentReadMaskKey) + offset);
+ memcpy(self->_currentReadMaskKey,
+ ((uint8_t *)mapped_buffer) + offset,
+ sizeof(self->_currentReadMaskKey));
+ }
+
+ [self _handleFrameHeader:header curData:self->_currentFrameData];
+ } readToCurrentFrame:NO unmaskBytes:NO];
+ }
+ } readToCurrentFrame:NO unmaskBytes:NO];
+}
+
+- (void)_readFrameNew;
+{
+ dispatch_async(_workQueue, ^{
+ [self->_currentFrameData setLength:0];
+
+ self->_currentFrameOpcode = 0;
+ self->_currentFrameCount = 0;
+ self->_readOpCount = 0;
+ self->_currentStringScanPosition = 0;
+
+ [self _readFrameContinue];
+ });
+}
+
+- (void)_pumpWriting;
+{
+ [self assertOnWorkQueue];
+
+ NSUInteger dataLength = _outputBuffer.length;
+ if (dataLength - _outputBufferOffset > 0 && _outputStream.hasSpaceAvailable) {
+ NSInteger bytesWritten = [_outputStream write:_outputBuffer.bytes + _outputBufferOffset maxLength:dataLength - _outputBufferOffset];
+ if (bytesWritten == -1) {
+ [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2145 userInfo:[NSDictionary dictionaryWithObject:@"Error writing to stream" forKey:NSLocalizedDescriptionKey]]];
+ return;
+ }
+
+ _outputBufferOffset += bytesWritten;
+
+ if (_outputBufferOffset > 4096 && _outputBufferOffset > (_outputBuffer.length >> 1)) {
+ _outputBuffer = [[NSMutableData alloc] initWithBytes:(char *)_outputBuffer.bytes + _outputBufferOffset length:_outputBuffer.length - _outputBufferOffset];
+ _outputBufferOffset = 0;
+ }
+ }
+
+ if (_closeWhenFinishedWriting &&
+ _outputBuffer.length - _outputBufferOffset == 0 &&
+ (_inputStream.streamStatus != NSStreamStatusNotOpen &&
+ _inputStream.streamStatus != NSStreamStatusClosed) &&
+ !_sentClose) {
+ _sentClose = YES;
+
+ [_outputStream close];
+ [_inputStream close];
+
+
+ for (NSArray *runLoop in [_scheduledRunloops copy]) {
+ [self unscheduleFromRunLoop:[runLoop objectAtIndex:0] forMode:[runLoop objectAtIndex:1]];
+ }
+
+ if (!_failed) {
+ [self _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
+ [self.delegate webSocket:self
+ didCloseWithCode:self->_closeCode
+ reason:self->_closeReason
+ wasClean:YES];
+ }
+ }];
+ }
+
+ _selfRetain = nil;
+ }
+}
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback;
+{
+ [self assertOnWorkQueue];
+ [self _addConsumerWithScanner:consumer callback:callback dataLength:0];
+}
+
+- (void)_addConsumerWithDataLength:(size_t)dataLength callback:(data_callback)callback readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ [self assertOnWorkQueue];
+ assert(dataLength);
+
+ [_consumers addObject:[_consumerPool consumerWithScanner:nil handler:callback bytesNeeded:dataLength readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes]];
+ [self _pumpScanner];
+}
+
+- (void)_addConsumerWithScanner:(stream_scanner)consumer callback:(data_callback)callback dataLength:(size_t)dataLength;
+{
+ [self assertOnWorkQueue];
+ [_consumers addObject:[_consumerPool consumerWithScanner:consumer handler:callback bytesNeeded:dataLength readToCurrentFrame:NO unmaskBytes:NO]];
+ [self _pumpScanner];
+}
+
+
+static const char CRLFCRLFBytes[] = {'\r', '\n', '\r', '\n'};
+
+- (void)_readUntilHeaderCompleteWithCallback:(data_callback)dataHandler;
+{
+ [self _readUntilBytes:CRLFCRLFBytes length:sizeof(CRLFCRLFBytes) callback:dataHandler];
+}
+
+- (void)_readUntilBytes:(const void *)bytes length:(size_t)length callback:(data_callback)dataHandler;
+{
+ // TODO optimize so this can continue from where we last searched
+ stream_scanner consumer = ^size_t(NSData *data) {
+ __block size_t found_size = 0;
+ __block size_t match_count = 0;
+
+ size_t size = data.length;
+ const unsigned char *buffer = data.bytes;
+ for (size_t i = 0; i < size; i++ ) {
+ if (((const unsigned char *)buffer)[i] == ((const unsigned char *)bytes)[match_count]) {
+ match_count += 1;
+ if (match_count == length) {
+ found_size = i + 1;
+ break;
+ }
+ } else {
+ match_count = 0;
+ }
+ }
+ return found_size;
+ };
+ [self _addConsumerWithScanner:consumer callback:dataHandler];
+}
+
+
+// Returns YES if it did any work
+- (BOOL)_innerPumpScanner {
+
+ BOOL didWork = NO;
+
+ if (self.readyState >= SR_CLOSING) {
+ return didWork;
+ }
+
+ if (!_consumers.count) {
+ return didWork;
+ }
+
+ size_t curSize = _readBuffer.length - _readBufferOffset;
+ if (!curSize) {
+ return didWork;
+ }
+
+ SRIOConsumer *consumer = [_consumers objectAtIndex:0];
+
+ size_t bytesNeeded = consumer.bytesNeeded;
+
+ size_t foundSize = 0;
+ if (consumer.consumer) {
+ NSData *tempView = [NSData dataWithBytesNoCopy:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset freeWhenDone:NO];
+ foundSize = consumer.consumer(tempView);
+ } else {
+ assert(consumer.bytesNeeded);
+ if (curSize >= bytesNeeded) {
+ foundSize = bytesNeeded;
+ } else if (consumer.readToCurrentFrame) {
+ foundSize = curSize;
+ }
+ }
+
+ NSData *slice = nil;
+ if (consumer.readToCurrentFrame || foundSize) {
+ NSRange sliceRange = NSMakeRange(_readBufferOffset, foundSize);
+ slice = [_readBuffer subdataWithRange:sliceRange];
+
+ _readBufferOffset += foundSize;
+
+ if (_readBufferOffset > 4096 && _readBufferOffset > (_readBuffer.length >> 1)) {
+      _readBuffer = [[NSMutableData alloc] initWithBytes:(char *)_readBuffer.bytes + _readBufferOffset length:_readBuffer.length - _readBufferOffset];
+      _readBufferOffset = 0;
+ }
+
+ if (consumer.unmaskBytes) {
+ NSMutableData *mutableSlice = [slice mutableCopy];
+
+ NSUInteger len = mutableSlice.length;
+ uint8_t *bytes = mutableSlice.mutableBytes;
+
+ for (NSUInteger i = 0; i < len; i++) {
+ bytes[i] = bytes[i] ^ _currentReadMaskKey[_currentReadMaskOffset % sizeof(_currentReadMaskKey)];
+ _currentReadMaskOffset += 1;
+ }
+
+ slice = mutableSlice;
+ }
+
+ if (consumer.readToCurrentFrame) {
+ [_currentFrameData appendData:slice];
+
+ _readOpCount += 1;
+
+ if (_currentFrameOpcode == SROpCodeTextFrame) {
+ // Validate UTF8 stuff.
+ size_t currentDataSize = _currentFrameData.length;
+ if (_currentFrameOpcode == SROpCodeTextFrame && currentDataSize > 0) {
+ // TODO: Optimize the crap out of this. Don't really have to copy all the data each time
+
+ size_t scanSize = currentDataSize - _currentStringScanPosition;
+
+ NSData *scan_data = [_currentFrameData subdataWithRange:NSMakeRange(_currentStringScanPosition, scanSize)];
+ int32_t valid_utf8_size = validate_dispatch_data_partial_string(scan_data);
+
+ if (valid_utf8_size == -1) {
+ [self closeWithCode:SRStatusCodeInvalidUTF8 reason:@"Text frames must be valid UTF-8"];
+ dispatch_async(_workQueue, ^{
+ [self _disconnect];
+ });
+ return didWork;
+ } else {
+ _currentStringScanPosition += valid_utf8_size;
+ }
+ }
+
+ }
+
+ consumer.bytesNeeded -= foundSize;
+
+ if (consumer.bytesNeeded == 0) {
+ [_consumers removeObjectAtIndex:0];
+ consumer.handler(self, nil);
+ [_consumerPool returnConsumer:consumer];
+ didWork = YES;
+ }
+ } else if (foundSize) {
+ [_consumers removeObjectAtIndex:0];
+ consumer.handler(self, slice);
+ [_consumerPool returnConsumer:consumer];
+ didWork = YES;
+ }
+ }
+ return didWork;
+}
+
+-(void)_pumpScanner;
+{
+ [self assertOnWorkQueue];
+
+ if (!_isPumping) {
+ _isPumping = YES;
+ } else {
+ return;
+ }
+
+ while ([self _innerPumpScanner]) {
+
+ }
+
+ _isPumping = NO;
+}
+
+//#define NOMASK
+
+static const size_t SRFrameHeaderOverhead = 32;
+
+- (void)_sendFrameWithOpcode:(SROpCode)opcode data:(id)data;
+{
+ [self assertOnWorkQueue];
+
+ if (nil == data) {
+ return;
+ }
+
+ NSAssert([data isKindOfClass:[NSData class]] || [data isKindOfClass:[NSString class]], @"NSString or NSData");
+
+ size_t payloadLength = [data isKindOfClass:[NSString class]] ? [(NSString *)data lengthOfBytesUsingEncoding:NSUTF8StringEncoding] : [data length];
+
+ NSMutableData *frame = [[NSMutableData alloc] initWithLength:payloadLength + SRFrameHeaderOverhead];
+ if (!frame) {
+ [self closeWithCode:SRStatusCodeMessageTooBig reason:@"Message too big"];
+ return;
+ }
+ uint8_t *frame_buffer = (uint8_t *)[frame mutableBytes];
+
+ // set fin
+ frame_buffer[0] = SRFinMask | opcode;
+
+ BOOL useMask = YES;
+#ifdef NOMASK
+ useMask = NO;
+#endif
+
+ if (useMask) {
+ // set the mask and header
+ frame_buffer[1] |= SRMaskMask;
+ }
+
+ size_t frame_buffer_size = 2;
+
+ const uint8_t *unmasked_payload = NULL;
+ if ([data isKindOfClass:[NSData class]]) {
+ unmasked_payload = (uint8_t *)[data bytes];
+ } else if ([data isKindOfClass:[NSString class]]) {
+ unmasked_payload = (const uint8_t *)[data UTF8String];
+ } else {
+ return;
+ }
+
+ if (payloadLength < 126) {
+ frame_buffer[1] |= payloadLength;
+ } else if (payloadLength <= UINT16_MAX) {
+ frame_buffer[1] |= 126;
+ *((uint16_t *)(frame_buffer + frame_buffer_size)) = EndianU16_BtoN((uint16_t)payloadLength);
+ frame_buffer_size += sizeof(uint16_t);
+ } else {
+ frame_buffer[1] |= 127;
+ *((uint64_t *)(frame_buffer + frame_buffer_size)) = EndianU64_BtoN((uint64_t)payloadLength);
+ frame_buffer_size += sizeof(uint64_t);
+ }
+
+ if (!useMask) {
+ for (size_t i = 0; i < payloadLength; i++) {
+ frame_buffer[frame_buffer_size] = unmasked_payload[i];
+ frame_buffer_size += 1;
+ }
+ } else {
+ uint8_t *mask_key = frame_buffer + frame_buffer_size;
+ BOOL success = !SecRandomCopyBytes(kSecRandomDefault, sizeof(uint32_t), (uint8_t *)mask_key);
+ assert(success);
+ frame_buffer_size += sizeof(uint32_t);
+
+ // TODO: could probably optimize this with SIMD
+ for (size_t i = 0; i < payloadLength; i++) {
+ frame_buffer[frame_buffer_size] = unmasked_payload[i] ^ mask_key[i % sizeof(uint32_t)];
+ frame_buffer_size += 1;
+ }
+ }
+
+ assert(frame_buffer_size <= [frame length]);
+ frame.length = frame_buffer_size;
+
+ [self _writeData:frame];
+}
+
+- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode;
+{
+ if (_secure && !_pinnedCertFound && (eventCode == NSStreamEventHasBytesAvailable || eventCode == NSStreamEventHasSpaceAvailable)) {
+
+ NSArray *sslCerts = [_urlRequest SR_SSLPinnedCertificates];
+ if (sslCerts) {
+ SecTrustRef secTrust = (__bridge SecTrustRef)[aStream propertyForKey:(__bridge id)kCFStreamPropertySSLPeerTrust];
+ if (secTrust) {
+ NSInteger numCerts = SecTrustGetCertificateCount(secTrust);
+ for (NSInteger i = 0; i < numCerts && !_pinnedCertFound; i++) {
+ SecCertificateRef cert = SecTrustGetCertificateAtIndex(secTrust, i);
+ NSData *certData = CFBridgingRelease(SecCertificateCopyData(cert));
+
+ for (id ref in sslCerts) {
+ SecCertificateRef trustedCert = (__bridge SecCertificateRef)ref;
+ NSData *trustedCertData = CFBridgingRelease(SecCertificateCopyData(trustedCert));
+
+ if ([trustedCertData isEqualToData:certData]) {
+ _pinnedCertFound = YES;
+ break;
+ }
+ }
+ }
+ }
+
+ if (!_pinnedCertFound) {
+ dispatch_async(_workQueue, ^{
+          [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:23556 userInfo:[NSDictionary dictionaryWithObject:@"Invalid server cert" forKey:NSLocalizedDescriptionKey]]];
+ });
+ return;
+ }
+ }
+ }
+
+ dispatch_async(_workQueue, ^{
+ switch (eventCode) {
+ case NSStreamEventOpenCompleted: {
+ SRFastLog(@"NSStreamEventOpenCompleted %@", aStream);
+ if (self.readyState >= SR_CLOSING) {
+ return;
+ }
+ assert(self->_readBuffer);
+
+ if (self.readyState == SR_CONNECTING && aStream == self->_inputStream) {
+ [self didConnect];
+ }
+ [self _pumpWriting];
+ [self _pumpScanner];
+ break;
+ }
+
+ case NSStreamEventErrorOccurred: {
+ SRFastLog(@"NSStreamEventErrorOccurred %@ %@", aStream, [[aStream streamError] copy]);
+ /// TODO specify error better!
+ [self _failWithError:aStream.streamError];
+ self->_readBufferOffset = 0;
+ [self->_readBuffer setLength:0];
+ break;
+
+ }
+
+ case NSStreamEventEndEncountered: {
+ [self _pumpScanner];
+ SRFastLog(@"NSStreamEventEndEncountered %@", aStream);
+ if (aStream.streamError) {
+ [self _failWithError:aStream.streamError];
+ } else {
+ if (self.readyState != SR_CLOSED) {
+ self.readyState = SR_CLOSED;
+ self->_selfRetain = nil;
+ }
+
+ if (!self->_sentClose && !self->_failed) {
+ self->_sentClose = YES;
+            // Reaching this point means the stream ended before we sent a close frame,
+            // so the close was not clean.
+ [self
+ _performDelegateBlock:^{
+ if ([self.delegate respondsToSelector:@selector(webSocket:didCloseWithCode:reason:wasClean:)]) {
+ [self.delegate webSocket:self
+ didCloseWithCode:SRStatusCodeGoingAway
+ reason:@"Stream end encountered"
+ wasClean:NO];
+ }
+ }];
+ }
+ }
+
+ break;
+ }
+
+ case NSStreamEventHasBytesAvailable: {
+ SRFastLog(@"NSStreamEventHasBytesAvailable %@", aStream);
+ enum EnumType : int { bufferSize = 2048 };
+ uint8_t buffer[bufferSize];
+
+ while (self->_inputStream.hasBytesAvailable) {
+ NSInteger bytes_read = [self->_inputStream read:buffer maxLength:bufferSize];
+
+ if (bytes_read > 0) {
+ [self->_readBuffer appendBytes:buffer length:bytes_read];
+ } else if (bytes_read < 0) {
+ [self _failWithError:self->_inputStream.streamError];
+ }
+
+ if (bytes_read != bufferSize) {
+ break;
+ }
+ };
+ [self _pumpScanner];
+ break;
+ }
+
+ case NSStreamEventHasSpaceAvailable: {
+ SRFastLog(@"NSStreamEventHasSpaceAvailable %@", aStream);
+ [self _pumpWriting];
+ break;
+ }
+
+ default:
+ SRFastLog(@"(default) %@", aStream);
+ break;
+ }
+ });
+}
+
+@end
+
+
+@implementation SRIOConsumer
+
+@synthesize bytesNeeded = _bytesNeeded;
+@synthesize consumer = _scanner;
+@synthesize handler = _handler;
+@synthesize readToCurrentFrame = _readToCurrentFrame;
+@synthesize unmaskBytes = _unmaskBytes;
+
+- (void)setupWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ _scanner = [scanner copy];
+ _handler = [handler copy];
+ _bytesNeeded = bytesNeeded;
+ _readToCurrentFrame = readToCurrentFrame;
+ _unmaskBytes = unmaskBytes;
+ assert(_scanner || _bytesNeeded);
+}
+
+
+@end
+
+
+@implementation SRIOConsumerPool {
+ NSUInteger _poolSize;
+ NSMutableArray *_bufferedConsumers;
+}
+
+- (id)initWithBufferCapacity:(NSUInteger)poolSize;
+{
+ self = [super init];
+ if (self) {
+ _poolSize = poolSize;
+ _bufferedConsumers = [[NSMutableArray alloc] initWithCapacity:poolSize];
+ }
+ return self;
+}
+
+- (id)init
+{
+ return [self initWithBufferCapacity:8];
+}
+
+- (SRIOConsumer *)consumerWithScanner:(stream_scanner)scanner handler:(data_callback)handler bytesNeeded:(size_t)bytesNeeded readToCurrentFrame:(BOOL)readToCurrentFrame unmaskBytes:(BOOL)unmaskBytes;
+{
+ SRIOConsumer *consumer = nil;
+ if (_bufferedConsumers.count) {
+ consumer = [_bufferedConsumers lastObject];
+ [_bufferedConsumers removeLastObject];
+ } else {
+ consumer = [[SRIOConsumer alloc] init];
+ }
+
+ [consumer setupWithScanner:scanner handler:handler bytesNeeded:bytesNeeded readToCurrentFrame:readToCurrentFrame unmaskBytes:unmaskBytes];
+
+ return consumer;
+}
+
+- (void)returnConsumer:(SRIOConsumer *)consumer;
+{
+ if (_bufferedConsumers.count < _poolSize) {
+ [_bufferedConsumers addObject:consumer];
+ }
+}
+
+@end
+
+
+@implementation NSURLRequest (CertificateAdditions)
+
+- (NSArray *)SR_SSLPinnedCertificates;
+{
+ return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+@end
+
+@implementation NSMutableURLRequest (CertificateAdditions)
+
+- (NSArray *)SR_SSLPinnedCertificates;
+{
+ return [NSURLProtocol propertyForKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+- (void)setSR_SSLPinnedCertificates:(NSArray *)SR_SSLPinnedCertificates;
+{
+ [NSURLProtocol setProperty:SR_SSLPinnedCertificates forKey:@"SR_SSLPinnedCertificates" inRequest:self];
+}
+
+@end
+
+@implementation NSURL (SRWebSocket)
+
+- (NSString *)SR_origin;
+{
+ NSString *scheme = [self.scheme lowercaseString];
+
+ if ([scheme isEqualToString:@"wss"]) {
+ scheme = @"https";
+ } else if ([scheme isEqualToString:@"ws"]) {
+ scheme = @"http";
+ }
+
+ if (self.port) {
+ return [NSString stringWithFormat:@"%@://%@:%@/", scheme, self.host, self.port];
+ } else {
+ return [NSString stringWithFormat:@"%@://%@/", scheme, self.host];
+ }
+}
+
+@end
+
+//#define SR_ENABLE_LOG
+
+static inline void SRFastLog(NSString *format, ...) {
+#ifdef SR_ENABLE_LOG
+ __block va_list arg_list;
+ va_start (arg_list, format);
+
+ NSString *formattedString = [[NSString alloc] initWithFormat:format arguments:arg_list];
+
+ va_end(arg_list);
+
+ NSLog(@"[SR] %@", formattedString);
+#endif
+}
+
+
+#ifdef HAS_ICU
+
+static inline int32_t validate_dispatch_data_partial_string(NSData *data) {
+ if ([data length] > INT32_MAX) {
+ // INT32_MAX is the limit so long as this Framework is using 32 bit ints everywhere.
+ return -1;
+ }
+
+ int32_t size = (int32_t)[data length];
+
+ const void * contents = [data bytes];
+ const uint8_t *str = (const uint8_t *)contents;
+
+ UChar32 codepoint = 1;
+ int32_t offset = 0;
+ int32_t lastOffset = 0;
+ while(offset < size && codepoint > 0) {
+ lastOffset = offset;
+ U8_NEXT(str, offset, size, codepoint);
+ }
+
+ if (codepoint == -1) {
+ // Check to see if the last byte is valid or whether it was just continuing
+ if (!U8_IS_LEAD(str[lastOffset]) || U8_COUNT_TRAIL_BYTES(str[lastOffset]) + lastOffset < (int32_t)size) {
+
+ size = -1;
+ } else {
+ uint8_t leadByte = str[lastOffset];
+ U8_MASK_LEAD_BYTE(leadByte, U8_COUNT_TRAIL_BYTES(leadByte));
+
+ for (int i = lastOffset + 1; i < offset; i++) {
+ if (U8_IS_SINGLE(str[i]) || U8_IS_LEAD(str[i]) || !U8_IS_TRAIL(str[i])) {
+ size = -1;
+ }
+ }
+
+ if (size != -1) {
+ size = lastOffset;
+ }
+ }
+ }
+
+ if (size != -1 && ![[NSString alloc] initWithBytesNoCopy:(char *)[data bytes] length:size encoding:NSUTF8StringEncoding freeWhenDone:NO]) {
+ size = -1;
+ }
+
+ return size;
+}
+
+#else
+
+// This is a hack, and probably not optimal
+static inline int32_t validate_dispatch_data_partial_string(NSData *data) {
+ static const int maxCodepointSize = 3;
+
+ for (int i = 0; i < maxCodepointSize; i++) {
+ NSString *str = [[NSString alloc] initWithBytesNoCopy:(char *)data.bytes length:data.length - i encoding:NSUTF8StringEncoding freeWhenDone:NO];
+ if (str) {
+ return data.length - i;
+ }
+ }
+
+ return -1;
+}
+
+#endif
+
+static _SRRunLoopThread *networkThread = nil;
+static NSRunLoop *networkRunLoop = nil;
+
+@implementation NSRunLoop (SRWebSocket)
+
++ (NSRunLoop *)SR_networkRunLoop {
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ networkThread = [[_SRRunLoopThread alloc] init];
+ networkThread.name = @"com.squareup.SocketRocket.NetworkThread";
+ [networkThread start];
+ networkRunLoop = networkThread.runLoop;
+ });
+
+ return networkRunLoop;
+}
+
+@end
+
+
+@implementation _SRRunLoopThread {
+ dispatch_group_t _waitGroup;
+}
+
+@synthesize runLoop = _runLoop;
+
+- (void)dealloc
+{
+ sr_dispatch_release(_waitGroup);
+}
+
+- (id)init
+{
+ self = [super init];
+ if (self) {
+ _waitGroup = dispatch_group_create();
+ dispatch_group_enter(_waitGroup);
+ }
+ return self;
+}
+
+- (void)main;
+{
+ @autoreleasepool {
+ _runLoop = [NSRunLoop currentRunLoop];
+ dispatch_group_leave(_waitGroup);
+
+ NSTimer *timer = [[NSTimer alloc] initWithFireDate:[NSDate distantFuture] interval:0.0 target:nil selector:nil userInfo:nil repeats:NO];
+ [_runLoop addTimer:timer forMode:NSDefaultRunLoopMode];
+
+ while ([_runLoop runMode:NSDefaultRunLoopMode beforeDate:[NSDate distantFuture]]) {
+
+ }
+ assert(NO);
+ }
+}
+
+- (NSRunLoop *)runLoop;
+{
+ dispatch_group_wait(_waitGroup, DISPATCH_TIME_FOREVER);
+ return _runLoop;
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objc/Icon-120.png b/third_party/libwebrtc/examples/objc/Icon-120.png
new file mode 100644
index 0000000000..938fef477b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon-120.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/Icon-180.png b/third_party/libwebrtc/examples/objc/Icon-180.png
new file mode 100644
index 0000000000..a5b7609680
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon-180.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/Icon.png b/third_party/libwebrtc/examples/objc/Icon.png
new file mode 100644
index 0000000000..55773ca9d9
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/Icon.png
Binary files differ
diff --git a/third_party/libwebrtc/examples/objc/README b/third_party/libwebrtc/examples/objc/README
new file mode 100644
index 0000000000..bfe18b37c5
--- /dev/null
+++ b/third_party/libwebrtc/examples/objc/README
@@ -0,0 +1,3 @@
+This directory contains sample iOS and Mac clients for http://apprtc.appspot.com
+
+See ../../app/webrtc/objc/README for information on how to use it.
diff --git a/third_party/libwebrtc/examples/objcnativeapi/Info.plist b/third_party/libwebrtc/examples/objcnativeapi/Info.plist
new file mode 100644
index 0000000000..cbc9e5f9f3
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/Info.plist
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.ObjCNativeAPIDemo</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>ObjCNativeAPIDemo</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h
new file mode 100644
index 0000000000..02372dbfd2
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface NADAppDelegate : UIResponder <UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow* window;
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m
new file mode 100644
index 0000000000..254dd3be76
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADAppDelegate.m
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NADAppDelegate.h"
+
+#import "NADViewController.h"
+
+@interface NADAppDelegate ()
+@end
+
+@implementation NADAppDelegate
+
+@synthesize window = _window;
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ [_window makeKeyAndVisible];
+
+ NADViewController *viewController = [[NADViewController alloc] init];
+ _window.rootViewController = viewController;
+
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+ // Sent when the application is about to move from active to inactive state. This can occur for
+ // certain types of temporary interruptions (such as an incoming phone call or SMS message) or
+ // when the user quits the application and it begins the transition to the background state. Use
+ // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
+ // callbacks. Games should use this method to pause the game.
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+ // Use this method to release shared resources, save user data, invalidate timers, and store
+ // enough application state information to restore your application to its current state in case
+ // it is terminated later. If your application supports background execution, this method is
+ // called instead of applicationWillTerminate: when the user quits.
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+ // Called as part of the transition from the background to the active state; here you can undo
+ // many of the changes made on entering the background.
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+ // Restart any tasks that were paused (or not yet started) while the application was inactive. If
+ // the application was previously in the background, optionally refresh the user interface.
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+ // Called when the application is about to terminate. Save data if appropriate. See also
+ // applicationDidEnterBackground:.
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h
new file mode 100644
index 0000000000..c43bebb52d
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+@interface NADViewController : UIViewController
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm
new file mode 100644
index 0000000000..fd244799f8
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/NADViewController.mm
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NADViewController.h"
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/components/renderer/metal/RTCMTLVideoView.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#include <memory>
+
+#include "examples/objcnativeapi/objc/objc_call_client.h"
+
+@interface NADViewController ()
+
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
+@property(nonatomic) UIButton *callButton;
+@property(nonatomic) UIButton *hangUpButton;
+
+@end
+
+@implementation NADViewController {
+ std::unique_ptr<webrtc_examples::ObjCCallClient> _call_client;
+
+ UIView *_view;
+}
+
+@synthesize capturer = _capturer;
+@synthesize localVideoView = _localVideoView;
+@synthesize remoteVideoView = _remoteVideoView;
+@synthesize callButton = _callButton;
+@synthesize hangUpButton = _hangUpButton;
+
+#pragma mark - View controller lifecycle
+
+- (void)loadView {
+ _view = [[UIView alloc] initWithFrame:CGRectZero];
+
+ _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+ _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
+ [_view addSubview:_remoteVideoView];
+
+ _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+ _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
+ [_view addSubview:_localVideoView];
+
+ _callButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _callButton.translatesAutoresizingMaskIntoConstraints = NO;
+ [_callButton setTitle:@"Call" forState:UIControlStateNormal];
+ [_callButton addTarget:self action:@selector(call:) forControlEvents:UIControlEventTouchUpInside];
+ [_view addSubview:_callButton];
+
+ _hangUpButton = [UIButton buttonWithType:UIButtonTypeSystem];
+ _hangUpButton.translatesAutoresizingMaskIntoConstraints = NO;
+ [_hangUpButton setTitle:@"Hang up" forState:UIControlStateNormal];
+ [_hangUpButton addTarget:self
+ action:@selector(hangUp:)
+ forControlEvents:UIControlEventTouchUpInside];
+ [_view addSubview:_hangUpButton];
+
+ UILayoutGuide *margin = _view.layoutMarginsGuide;
+ [_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
+ [_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES;
+ [_remoteVideoView.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
+ [_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor].active = YES;
+
+ [_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
+ YES;
+ [_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:8.0].active = YES;
+ [_localVideoView.widthAnchor constraintEqualToConstant:60].active = YES;
+ [_localVideoView.heightAnchor constraintEqualToConstant:60].active = YES;
+
+ [_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
+ YES;
+ [_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = YES;
+ [_callButton.widthAnchor constraintEqualToConstant:100].active = YES;
+ [_callButton.heightAnchor constraintEqualToConstant:40].active = YES;
+
+ [_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor constant:8.0].active =
+ YES;
+ [_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active =
+ YES;
+ [_hangUpButton.widthAnchor constraintEqualToConstant:100].active = YES;
+ [_hangUpButton.heightAnchor constraintEqualToConstant:40].active = YES;
+
+ self.view = _view;
+}
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+
+ self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] init];
+ self.localVideoView.captureSession = self.capturer.captureSession;
+
+ _call_client.reset(new webrtc_examples::ObjCCallClient());
+
+ // Start capturer.
+ AVCaptureDevice *selectedDevice = nil;
+ NSArray<AVCaptureDevice *> *captureDevices =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
+ for (AVCaptureDevice *device in captureDevices) {
+ if (device.position == AVCaptureDevicePositionFront) {
+ selectedDevice = device;
+ break;
+ }
+ }
+
+ AVCaptureDeviceFormat *selectedFormat = nil;
+ int targetWidth = 640;
+ int targetHeight = 480;
+ int currentDiff = INT_MAX;
+ NSArray<AVCaptureDeviceFormat *> *formats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice];
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+ if (diff < currentDiff) {
+ selectedFormat = format;
+ currentDiff = diff;
+ } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+ selectedFormat = format;
+ }
+ }
+
+ [self.capturer startCaptureWithDevice:selectedDevice format:selectedFormat fps:30];
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+ // Dispose of any resources that can be recreated.
+}
+
+#pragma mark - Actions
+
+- (IBAction)call:(id)sender {
+ _call_client->Call(self.capturer, self.remoteVideoView);
+}
+
+- (IBAction)hangUp:(id)sender {
+ _call_client->Hangup();
+}
+
+@end
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/main.m b/third_party/libwebrtc/examples/objcnativeapi/objc/main.m
new file mode 100644
index 0000000000..2c3b5fbbfb
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/main.m
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+#import "NADAppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
+ }
+}
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h
new file mode 100644
index 0000000000..cb8501d9ce
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
+#define EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
+
+#include <memory>
+#include <string>
+
+#import "sdk/objc/base/RTCMacros.h"
+
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+
+namespace webrtc_examples {
+
+class ObjCCallClient {
+ public:
+ ObjCCallClient();
+
+ void Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer);
+ void Hangup();
+
+ private:
+ class PCObserver : public webrtc::PeerConnectionObserver {
+ public:
+ explicit PCObserver(ObjCCallClient* client);
+
+ void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ void OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ private:
+ ObjCCallClient* const client_;
+ };
+
+ void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
+ void CreatePeerConnection() RTC_RUN_ON(thread_checker_);
+ void Connect() RTC_RUN_ON(thread_checker_);
+
+ webrtc::SequenceChecker thread_checker_;
+
+ bool call_started_ RTC_GUARDED_BY(thread_checker_);
+
+ const std::unique_ptr<PCObserver> pc_observer_;
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
+ std::unique_ptr<rtc::Thread> signaling_thread_ RTC_GUARDED_BY(thread_checker_);
+
+ std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
+ RTC_GUARDED_BY(thread_checker_);
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source_
+ RTC_GUARDED_BY(thread_checker_);
+
+ webrtc::Mutex pc_mutex_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_ RTC_GUARDED_BY(pc_mutex_);
+};
+
+} // namespace webrtc_examples
+
+#endif // EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
diff --git a/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm
new file mode 100644
index 0000000000..90bcfcc35b
--- /dev/null
+++ b/third_party/libwebrtc/examples/objcnativeapi/objc/objc_call_client.mm
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/objcnativeapi/objc/objc_call_client.h"
+
+#include <memory>
+#include <utility>
+
+#import "sdk/objc/base/RTCVideoRenderer.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
+#import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
+#import "sdk/objc/helpers/RTCCameraPreviewView.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "sdk/objc/native/api/video_capturer.h"
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+#include "sdk/objc/native/api/video_renderer.h"
+
+namespace webrtc_examples {
+
+namespace {
+
+class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ explicit CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);
+
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError error) override;
+
+ private:
+ const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
+};
+
+class SetRemoteSessionDescriptionObserver : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
+};
+
+class SetLocalSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface {
+ public:
+ void OnSetLocalDescriptionComplete(webrtc::RTCError error) override;
+};
+
+} // namespace
+
+ObjCCallClient::ObjCCallClient()
+ : call_started_(false), pc_observer_(std::make_unique<PCObserver>(this)) {
+ thread_checker_.Detach();
+ CreatePeerConnectionFactory();
+}
+
+void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (call_started_) {
+ RTC_LOG(LS_WARNING) << "Call already started.";
+ return;
+ }
+ call_started_ = true;
+
+ remote_sink_ = webrtc::ObjCToNativeVideoRenderer(remote_renderer);
+
+ video_source_ =
+ webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread_.get(), worker_thread_.get());
+
+ CreatePeerConnection();
+ Connect();
+}
+
+void ObjCCallClient::Hangup() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ call_started_ = false;
+
+ {
+ webrtc::MutexLock lock(&pc_mutex_);
+ if (pc_ != nullptr) {
+ pc_->Close();
+ pc_ = nullptr;
+ }
+ }
+
+ remote_sink_ = nullptr;
+ video_source_ = nullptr;
+}
+
+void ObjCCallClient::CreatePeerConnectionFactory() {
+ network_thread_ = rtc::Thread::CreateWithSocketServer();
+ network_thread_->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread_->Start()) << "Failed to start thread";
+
+ worker_thread_ = rtc::Thread::Create();
+ worker_thread_->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread_->Start()) << "Failed to start thread";
+
+ signaling_thread_ = rtc::Thread::Create();
+ signaling_thread_->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread";
+
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = network_thread_.get();
+ dependencies.worker_thread = worker_thread_.get();
+ dependencies.signaling_thread = signaling_thread_.get();
+ dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
+ media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+ media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(
+ [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]);
+ media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(
+ [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]);
+ media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
+ dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ RTC_LOG(LS_INFO) << "Media engine created: " << dependencies.media_engine.get();
+ dependencies.call_factory = webrtc::CreateCallFactory();
+ dependencies.event_log_factory =
+ std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
+ pcf_ = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get();
+}
+
+void ObjCCallClient::CreatePeerConnection() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ // Encryption has to be disabled for loopback to work.
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pcf_->SetOptions(options);
+ webrtc::PeerConnectionDependencies pc_dependencies(pc_observer_.get());
+ pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)).MoveValue();
+ RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get();
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
+ pcf_->CreateVideoTrack(video_source_, "video");
+ pc_->AddTransceiver(local_video_track);
+ RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track.get();
+
+  for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& transceiver :
+       pc_->GetTransceivers()) {
+    rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track = transceiver->receiver()->track();
+ if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ static_cast<webrtc::VideoTrackInterface*>(track.get())
+ ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
+ RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get();
+ break;
+ }
+ }
+}
+
+void ObjCCallClient::Connect() {
+ webrtc::MutexLock lock(&pc_mutex_);
+ pc_->CreateOffer(rtc::make_ref_counted<CreateOfferObserver>(pc_).get(),
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) : client_(client) {}
+
+void ObjCCallClient::PCObserver::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ RTC_LOG(LS_INFO) << "OnSignalingChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+ RTC_LOG(LS_INFO) << "OnDataChannel";
+}
+
+void ObjCCallClient::PCObserver::OnRenegotiationNeeded() {
+ RTC_LOG(LS_INFO) << "OnRenegotiationNeeded";
+}
+
+void ObjCCallClient::PCObserver::OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceConnectionChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) {
+ RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
+}
+
+void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
+ webrtc::MutexLock lock(&client_->pc_mutex_);
+ RTC_DCHECK(client_->pc_ != nullptr);
+ client_->pc_->AddIceCandidate(candidate);
+}
+
+CreateOfferObserver::CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
+ : pc_(pc) {}
+
+void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
+ std::string sdp;
+ desc->ToString(&sdp);
+ RTC_LOG(LS_INFO) << "Created offer: " << sdp;
+
+ // Ownership of desc was transferred to us, now we transfer it forward.
+ pc_->SetLocalDescription(absl::WrapUnique(desc),
+ rtc::make_ref_counted<SetLocalSessionDescriptionObserver>());
+
+ // Generate a fake answer.
+ std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
+ webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
+ pc_->SetRemoteDescription(std::move(answer),
+ rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
+}
+
+void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Failed to create offer: " << error.message();
+}
+
+void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
+}
+
+void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << "Set local description: " << error.message();
+}
+
+} // namespace webrtc_examples