summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/sdk/objc
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
commit43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree620249daf56c0258faa40cbdcf9cfba06de2a846 /third_party/libwebrtc/sdk/objc
parentInitial commit. (diff)
downloadfirefox-43a97878ce14b72f0981164f87f2e35e14151312.tar.xz
firefox-43a97878ce14b72f0981164f87f2e35e14151312.zip
Adding upstream version 110.0.1.upstream/110.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/sdk/objc')
-rw-r--r--third_party/libwebrtc/sdk/objc/DEPS18
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h12
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h11
-rw-r--r--third_party/libwebrtc/sdk/objc/Info.plist26
-rw-r--r--third_party/libwebrtc/sdk/objc/OWNERS9
-rw-r--r--third_party/libwebrtc/sdk/objc/README.md37
-rw-r--r--third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h41
-rw-r--r--third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm67
-rw-r--r--third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h41
-rw-r--r--third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm151
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h34
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h32
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm52
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h31
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h28
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm67
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h44
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm72
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h79
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h273
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm549
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h63
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm33
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h132
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm220
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm87
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h71
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm74
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm130
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h32
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm58
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h74
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm170
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h36
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h49
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm76
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm42
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h31
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h114
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm196
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h37
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm60
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h34
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h46
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm90
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h34
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm82
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h37
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h49
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm157
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h62
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h50
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm161
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h23
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm34
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h48
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm43
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm34
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h143
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm102
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h398
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm939
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h85
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h35
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h105
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm322
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h21
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm49
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h48
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm72
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h38
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm56
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h30
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm40
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h73
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm113
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h76
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm128
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h33
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm43
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h58
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm121
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h32
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h86
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm159
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h33
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h31
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h54
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm132
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h46
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h137
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm190
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h20
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h48
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm103
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h19
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h55
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm193
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h21
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm38
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm52
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h51
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h37
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm92
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h30
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h38
-rw-r--r--third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm126
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h17
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm18
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm39
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm31
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm39
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm62
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm86
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h23
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm138
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm31
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m29
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h22
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCLogging.h67
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCLogging.mm48
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCMacros.h62
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h23
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h28
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h35
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m24
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h36
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m65
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h40
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h32
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h59
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h28
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m26
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m25
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h86
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm78
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h32
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h43
-rw-r--r--third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h46
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm176
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h95
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h265
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm1000
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h48
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m133
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h33
-rw-r--r--third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm89
-rw-r--r--third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h56
-rw-r--r--third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m535
-rw-r--r--third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h51
-rw-r--r--third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m215
-rw-r--r--third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm126
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h17
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm170
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m122
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h18
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm164
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h22
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm164
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h33
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h61
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm328
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h44
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m265
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h23
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm207
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m59
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h45
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m295
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm157
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m199
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h33
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m113
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h37
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h21
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm189
-rw-r--r--third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h39
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h25
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm29
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m85
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h31
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m102
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h60
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm120
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h18
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m49
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h18
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm276
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h18
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m49
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h22
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm819
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h19
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm205
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc90
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h47
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc327
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h113
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h52
-rw-r--r--third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm352
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h23
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm51
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h34
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm45
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h30
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m123
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h18
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h46
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m65
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h111
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm171
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/noop.mm13
-rw-r--r--third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h116
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h30
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm29
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm30
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h26
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm48
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_capturer.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm35
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h27
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_frame.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_frame.mm21
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h29
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm28
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_renderer.h28
-rw-r--r--third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h308
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm1165
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h143
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm669
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h41
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h76
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm109
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h141
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm488
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h42
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h50
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm86
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h67
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm95
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h41
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm123
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h44
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm209
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h24
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm28
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h39
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm38
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h59
-rw-r--r--third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm131
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm469
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm593
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm115
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm324
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm308
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m244
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm569
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm73
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm162
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm51
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m30
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm55
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm114
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m48
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm60
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm136
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m298
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm58
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m55
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm72
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m381
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm204
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm122
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm41
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcmbin0 -> 643632 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcmbin0 -> 1774010 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcmbin0 -> 1930896 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm243
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/foreman.mp4bin0 -> 546651 bytes
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h22
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm126
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/main.mm24
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm374
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm107
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm148
-rw-r--r--third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm113
352 files changed, 32341 insertions, 0 deletions
diff --git a/third_party/libwebrtc/sdk/objc/DEPS b/third_party/libwebrtc/sdk/objc/DEPS
new file mode 100644
index 0000000000..4cff92caf1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/DEPS
@@ -0,0 +1,18 @@
+include_rules = [
+ "+base",
+ "+components",
+ "+helpers",
+ "+sdk",
+ "+common_video/h264",
+ "+common_video/include",
+ "+common_video/libyuv/include",
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media",
+ "+modules/video_coding",
+ "+pc",
+ "+system_wrappers",
+ "+modules/audio_device",
+ "+modules/audio_processing",
+ "+native",
+ "+third_party/libyuv",
+]
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h
new file mode 100644
index 0000000000..3ec1b613ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/NSString+StdString.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "helpers/NSString+StdString.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
new file mode 100644
index 0000000000..e5e376b0bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Common/scoped_cftyperef.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "helpers/scoped_cftyperef.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
new file mode 100644
index 0000000000..529aa8dcf5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCConfiguration+Native.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
new file mode 100644
index 0000000000..222e06ef33
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
new file mode 100644
index 0000000000..136d7003c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCDefaultShader.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/renderer/opengl/RTCDefaultShader.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
new file mode 100644
index 0000000000..4ba1caa41d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/renderer/opengl/RTCNV12TextureCache.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h b/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
new file mode 100644
index 0000000000..21281f36ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/video_codec/nalu_rewriter.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h
new file mode 100644
index 0000000000..7b448024de
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/audio_device_module.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/audio_device_module.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h
new file mode 100644
index 0000000000..ca9371c54d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_decoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_decoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h
new file mode 100644
index 0000000000..35e1e6c99f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_encoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_encoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h
new file mode 100644
index 0000000000..0e862cfa07
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/api/video_frame_buffer.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/api/video_frame_buffer.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h
new file mode 100644
index 0000000000..bd8513c342
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_decoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/src/objc_video_decoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h
new file mode 100644
index 0000000000..b6bd650a4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Framework/Native/src/objc_video_encoder_factory.h
@@ -0,0 +1,11 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "native/src/objc_video_encoder_factory.h"
diff --git a/third_party/libwebrtc/sdk/objc/Info.plist b/third_party/libwebrtc/sdk/objc/Info.plist
new file mode 100644
index 0000000000..38c437e7fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/Info.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>WebRTC</string>
+ <key>CFBundleIdentifier</key>
+ <string>org.webrtc.WebRTC</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>WebRTC</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/sdk/objc/OWNERS b/third_party/libwebrtc/sdk/objc/OWNERS
new file mode 100644
index 0000000000..6af9062b2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/OWNERS
@@ -0,0 +1,9 @@
+# Normal code changes.
+kthelgason@webrtc.org
+andersc@webrtc.org
+peterhanspers@webrtc.org
+denicija@webrtc.org
+
+# Rubberstamps of e.g. reverts and critical bug fixes.
+magjed@webrtc.org
+tkchin@webrtc.org
diff --git a/third_party/libwebrtc/sdk/objc/README.md b/third_party/libwebrtc/sdk/objc/README.md
new file mode 100644
index 0000000000..ff294a266f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/README.md
@@ -0,0 +1,37 @@
+# WebRTC Obj-C SDK
+
+This directory contains the Obj-C SDK for WebRTC. This includes wrappers for the
+C++ PeerConnection API and some platform specific components for iOS and macOS.
+
+## Organization
+
+- api/
+
+ Wrappers around classes and functions in the C++ API for creating and
+ configuring peer connections, etc.
+
+- base/
+
+ This directory contains some base protocols and classes that are used by both
+ the platform specific components and the SDK wrappers.
+
+- components/
+
+ These are the platform specific components. Contains components for handling
+ audio, capturing and rendering video, encoding and decoding using the
+ platform's hardware codec implementation and for representing video frames
+ in the platform's native format.
+
+- helpers/
+
+ These files are not WebRTC specific, but are general helper classes and
+ utilities for the Cocoa platforms.
+
+- native/
+
+ APIs for wrapping the platform specific components and using them with the
+ C++ API.
+
+- unittests/
+
+ This directory contains the tests.
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h
new file mode 100644
index 0000000000..9b123d2d05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter+Private.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter.h"
+
+#import "base/RTCVideoRenderer.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCVideoRendererAdapter ()
+
+/**
+ * The Objective-C video renderer passed to this adapter during construction.
+ * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to
+ * this video renderer.
+ */
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoRenderer)> videoRenderer;
+
+/**
+ * The native VideoSinkInterface surface exposed by this adapter. Calls made
+ * to this interface will be adapted and passed to the RTCVideoRenderer supplied
+ * during construction. This pointer is unsafe and owned by this class.
+ */
+@property(nonatomic, readonly) rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
+
+/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h
new file mode 100644
index 0000000000..b0b6f04488
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*
+ * Creates a rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
+ * rtc::VideoSinkInterface is used by WebRTC rendering code - this
+ * adapter adapts calls made to that interface to the RTCVideoRenderer supplied
+ * during construction.
+ */
+@interface RTCVideoRendererAdapter : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm
new file mode 100644
index 0000000000..ef02f72f60
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/RTCVideoRendererAdapter.mm
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoRendererAdapter+Private.h"
+#import "base/RTCVideoFrame.h"
+
+#include <memory>
+
+#include "sdk/objc/native/api/video_frame.h"
+
+namespace webrtc {
+
+class VideoRendererAdapter
+ : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
+ adapter_ = adapter;
+ size_ = CGSizeZero;
+ }
+
+ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
+ RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
+
+ CGSize current_size = (videoFrame.rotation % 180 == 0)
+ ? CGSizeMake(videoFrame.width, videoFrame.height)
+ : CGSizeMake(videoFrame.height, videoFrame.width);
+
+ if (!CGSizeEqualToSize(size_, current_size)) {
+ size_ = current_size;
+ [adapter_.videoRenderer setSize:size_];
+ }
+ [adapter_.videoRenderer renderFrame:videoFrame];
+ }
+
+ private:
+ __weak RTCVideoRendererAdapter *adapter_;
+ CGSize size_;
+};
+}
+
+@implementation RTCVideoRendererAdapter {
+ std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
+}
+
+@synthesize videoRenderer = _videoRenderer;
+
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer {
+ NSParameterAssert(videoRenderer);
+ if (self = [super init]) {
+ _videoRenderer = videoRenderer;
+ _adapter.reset(new webrtc::VideoRendererAdapter(self));
+ }
+ return self;
+}
+
+- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
+ return _adapter.get();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h
new file mode 100644
index 0000000000..c1aeb825cb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCLogging.h"
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef void (^RTCCallbackLoggerMessageHandler)(NSString *message);
+typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message,
+ RTCLoggingSeverity severity);
+
+// This class intercepts WebRTC logs and forwards them to a registered block.
+// This class is not threadsafe.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject
+
+// The severity level to capture. The default is kRTCLoggingSeverityInfo.
+@property(nonatomic, assign) RTCLoggingSeverity severity;
+
+// The callback handler will be called on the same thread that does the
+// logging, so if the logging callback can be slow it may be a good idea
+// to implement dispatching to some other queue.
+- (void)start:(nullable RTCCallbackLoggerMessageHandler)handler;
+- (void)startWithMessageAndSeverityHandler:
+ (nullable RTCCallbackLoggerMessageAndSeverityHandler)handler;
+
+- (void)stop;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm
new file mode 100644
index 0000000000..ba6fe1b1cc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/logging/RTCCallbackLogger.mm
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCallbackLogger.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/log_sinks.h"
+#include "rtc_base/logging.h"
+
+namespace {
+
+class CallbackLogSink final : public rtc::LogSink {
+ public:
+ CallbackLogSink(RTCCallbackLoggerMessageHandler callbackHandler)
+ : callback_handler_(callbackHandler) {}
+
+ void OnLogMessage(const std::string& message) override {
+ OnLogMessage(absl::string_view(message));
+ }
+
+ void OnLogMessage(absl::string_view message) override {
+ if (callback_handler_) {
+ callback_handler_([NSString stringForAbslStringView:message]);
+ }
+ }
+
+ private:
+ RTCCallbackLoggerMessageHandler callback_handler_;
+};
+
+class CallbackWithSeverityLogSink final : public rtc::LogSink {
+ public:
+ CallbackWithSeverityLogSink(RTCCallbackLoggerMessageAndSeverityHandler callbackHandler)
+ : callback_handler_(callbackHandler) {}
+
+ void OnLogMessage(const std::string& message) override { RTC_DCHECK_NOTREACHED(); }
+
+ void OnLogMessage(const std::string& message, rtc::LoggingSeverity severity) override {
+ OnLogMessage(absl::string_view(message), severity);
+ }
+
+ void OnLogMessage(absl::string_view message, rtc::LoggingSeverity severity) override {
+ if (callback_handler_) {
+ RTCLoggingSeverity loggingSeverity = NativeSeverityToObjcSeverity(severity);
+ callback_handler_([NSString stringForAbslStringView:message], loggingSeverity);
+ }
+ }
+
+ private:
+ static RTCLoggingSeverity NativeSeverityToObjcSeverity(rtc::LoggingSeverity severity) {
+ switch (severity) {
+ case rtc::LS_VERBOSE:
+ return RTCLoggingSeverityVerbose;
+ case rtc::LS_INFO:
+ return RTCLoggingSeverityInfo;
+ case rtc::LS_WARNING:
+ return RTCLoggingSeverityWarning;
+ case rtc::LS_ERROR:
+ return RTCLoggingSeverityError;
+ case rtc::LS_NONE:
+ return RTCLoggingSeverityNone;
+ }
+ }
+
+ RTCCallbackLoggerMessageAndSeverityHandler callback_handler_;
+};
+
+}
+
+@implementation RTC_OBJC_TYPE (RTCCallbackLogger) {
+ BOOL _hasStarted;
+ std::unique_ptr<rtc::LogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+
+- (instancetype)init {
+ self = [super init];
+ if (self != nil) {
+ _severity = RTCLoggingSeverityInfo;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self stop];
+}
+
+- (void)start:(nullable RTCCallbackLoggerMessageHandler)handler {
+ if (_hasStarted) {
+ return;
+ }
+
+ _logSink.reset(new CallbackLogSink(handler));
+
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)startWithMessageAndSeverityHandler:
+ (nullable RTCCallbackLoggerMessageAndSeverityHandler)handler {
+ if (_hasStarted) {
+ return;
+ }
+
+ _logSink.reset(new CallbackWithSeverityLogSink(handler));
+
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)stop {
+ if (!_hasStarted) {
+ return;
+ }
+ RTC_DCHECK(_logSink);
+ rtc::LogMessage::RemoveLogToStream(_logSink.get());
+ _hasStarted = NO;
+ _logSink.reset();
+}
+
+#pragma mark - Private
+
+- (rtc::LoggingSeverity)rtcSeverity {
+ switch (_severity) {
+ case RTCLoggingSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCLoggingSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCLoggingSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCLoggingSeverityError:
+ return rtc::LS_ERROR;
+ case RTCLoggingSeverityNone:
+ return rtc::LS_NONE;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
new file mode 100644
index 0000000000..2c333f9d73
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSource.h"
+
+#import "RTCMediaSource+Private.h"
+
+@interface RTC_OBJC_TYPE (RTCAudioSource)
+()
+
+ /**
+ * The AudioSourceInterface object passed to this RTCAudioSource during
+ * construction.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
+
+/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_UNAVAILABLE;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h
new file mode 100644
index 0000000000..9272fdf2d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaSource.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource)
+
+- (instancetype)init NS_UNAVAILABLE;
+
+// Sets the volume for the RTCMediaSource. `volume` is a gain value in the range
+// [0, 10].
+// Temporary fix to be able to modify volume of remote audio tracks.
+// TODO(kthelgason): Property stays here temporarily until a proper volume-api
+// is available on the surface exposed by webrtc.
+@property(nonatomic, assign) double volume;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm
new file mode 100644
index 0000000000..1541045099
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioSource.mm
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSource+Private.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioSource) {
+}
+
+@synthesize volume = _volume;
+@synthesize nativeAudioSource = _nativeAudioSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeAudioSource:
+ (rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeAudioSource);
+
+ if (self = [super initWithFactory:factory
+ nativeMediaSource:nativeAudioSource
+ type:RTCMediaSourceTypeAudio]) {
+ _nativeAudioSource = nativeAudioSource;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (NSString *)description {
+ NSString *stateString = [[self class] stringForState:self.state];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString];
+}
+
+- (void)setVolume:(double)volume {
+ _volume = volume;
+ _nativeAudioSource->SetVolume(volume);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
new file mode 100644
index 0000000000..6495500484
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@interface RTC_OBJC_TYPE (RTCAudioTrack)
+()
+
+ /** AudioTrackInterface created or passed in at construction. */
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
+
+/** Initialize an RTCAudioTrack with an id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h
new file mode 100644
index 0000000000..95eb5d3d48
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSource);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** The audio source for this audio track. */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm
new file mode 100644
index 0000000000..5c1736f436
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCAudioTrack.mm
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack+Private.h"
+
+#import "RTCAudioSource+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioTrack)
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(source);
+ RTC_DCHECK(trackId.length);
+
+ std::string nativeId = [NSString stdStringForString:trackId];
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+ factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource.get());
+ if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
+ _source = source;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
+ return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
+ if (!_source) {
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source(self.nativeAudioTrack->GetSource());
+ if (source) {
+ _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
+ nativeAudioSource:source];
+ }
+ }
+ return _source;
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
+ return rtc::scoped_refptr<webrtc::AudioTrackInterface>(
+ static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get()));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h
new file mode 100644
index 0000000000..5ac8984d4a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject <NSCopying>
+
+/** Private key in PEM. */
+@property(nonatomic, readonly, copy) NSString *private_key;
+
+/** Public key in an x509 cert encoded in PEM. */
+@property(nonatomic, readonly, copy) NSString *certificate;
+
+/**
+ * Initialize an RTCCertificate with PEM strings for private_key and certificate.
+ */
+- (instancetype)initWithPrivateKey:(NSString *)private_key
+                       certificate:(NSString *)certificate NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Generate a new certificate that can be stored and reused later.
+ *
+ * Optional dictionary of parameters. Defaults to KeyType ECDSA if none are
+ * provided.
+ * - name: "ECDSA" or "RSASSA-PKCS1-v1_5"
+ * - expires: optional NSNumber expiration handed to the native generator
+ */
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm
new file mode 100644
index 0000000000..e5c33e407c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCertificate.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCertificate.h"
+
+#import "base/RTCLogging.h"
+
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/ssl_identity.h"
+
+@implementation RTC_OBJC_TYPE (RTCCertificate)
+
+@synthesize private_key = _private_key;
+@synthesize certificate = _certificate;
+
+// NSCopying. The designated initializer already stores defensive copies of
+// both strings, so the properties can be passed through directly (the
+// previous extra copyWithZone: calls were redundant).
+- (id)copyWithZone:(NSZone *)zone {
+  return [[[self class] alloc] initWithPrivateKey:self.private_key
+                                      certificate:self.certificate];
+}
+
+// Designated initializer; copies both PEM strings.
+- (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString *)certificate {
+  if (self = [super init]) {
+    _private_key = [private_key copy];
+    _certificate = [certificate copy];
+  }
+  return self;
+}
+
+/** Generates a certificate with the native WebRTC generator.
+ *
+ * @param params Optional settings: @"name" selects the key type ("ECDSA",
+ *               the default, or "RSASSA-PKCS1-v1_5"); @"expires" is an
+ *               optional NSNumber expiration passed to the native generator.
+ * @return The generated certificate, or nil on failure.
+ */
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params {
+  rtc::KeyType keyType = rtc::KT_ECDSA;
+  // Use dictionary subscripting (objectForKey: semantics) instead of KVC's
+  // valueForKey:, which has special behavior for keys starting with "@".
+  NSString *keyTypeString = params[@"name"];
+  if ([keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
+    keyType = rtc::KT_RSA;
+  }
+
+  NSNumber *expires = params[@"expires"];
+  rtc::scoped_refptr<rtc::RTCCertificate> cc_certificate = nullptr;
+  if (expires != nil) {
+    uint64_t expirationTimestamp = [expires unsignedLongLongValue];
+    cc_certificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+                                                                       expirationTimestamp);
+  } else {
+    cc_certificate =
+        rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType), absl::nullopt);
+  }
+  if (!cc_certificate) {
+    RTCLogError(@"Failed to generate certificate.");
+    return nil;
+  }
+  // Grab the PEM strings and wrap them in an ObjC RTCCertificate.
+  rtc::RTCCertificatePEM pem = cc_certificate->ToPEM();
+  std::string pem_private_key = pem.private_key();
+  std::string pem_certificate = pem.certificate();
+  RTC_LOG(LS_INFO) << "CERT PEM ";
+  RTC_LOG(LS_INFO) << pem_certificate;
+
+  RTC_OBJC_TYPE(RTCCertificate) *cert =
+      [[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str())
+                                                    certificate:@(pem_certificate.c_str())];
+  return cert;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
new file mode 100644
index 0000000000..07c0da6041
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
+
+    /** Optional TurnCustomizer.
+     * With this class one can modify outgoing TURN messages.
+     * The object passed in must remain valid until PeerConnection::Close() is
+     * called.
+     * Note: held as a raw C++ pointer, so it is not retained by this object.
+     */
+    @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
new file mode 100644
index 0000000000..70a6532dbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
+
+    // Conversion helpers between the ObjC enums declared in
+    // RTCConfiguration.h and their native webrtc:: counterparts, plus
+    // string names used for logging/description.
+    + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy
+    : (RTCIceTransportPolicy)policy;
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+    (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+    (RTCBundlePolicy)policy;
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+    (RTCRtcpMuxPolicy)policy;
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy:
+    (RTCTcpCandidatePolicy)policy;
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+
++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy:
+    (RTCCandidateNetworkPolicy)policy;
+
++ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
+    (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;
+
++ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;
+
++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;
+
++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
+
++ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics;
+
++ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
+
+/**
+ * RTCConfiguration struct representation of this RTCConfiguration.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
+
+- (instancetype)initWithNativeConfiguration:
+    (const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
new file mode 100644
index 0000000000..345bf179bc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCertificate.h"
+#import "RTCCryptoOptions.h"
+#import "RTCMacros.h"
+
+/* Forward declaration; see RTCIceServer.h. */
+@class RTC_OBJC_TYPE(RTCIceServer);
+
+/**
+ * Represents the ice transport policy. This exposes the same states in C++,
+ * which include one more state than what exists in the W3C spec.
+ */
+typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) {
+  RTCIceTransportPolicyNone,
+  RTCIceTransportPolicyRelay,
+  RTCIceTransportPolicyNoHost,
+  RTCIceTransportPolicyAll
+};
+
+/** Represents the bundle policy. */
+typedef NS_ENUM(NSInteger, RTCBundlePolicy) {
+  RTCBundlePolicyBalanced,
+  RTCBundlePolicyMaxCompat,
+  RTCBundlePolicyMaxBundle
+};
+
+/** Represents the rtcp mux policy. */
+typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { RTCRtcpMuxPolicyNegotiate, RTCRtcpMuxPolicyRequire };
+
+/** Represents the tcp candidate policy. */
+typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
+  RTCTcpCandidatePolicyEnabled,
+  RTCTcpCandidatePolicyDisabled
+};
+
+/** Represents the candidate network policy. */
+typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) {
+  RTCCandidateNetworkPolicyAll,
+  RTCCandidateNetworkPolicyLowCost
+};
+
+/** Represents the continual gathering policy. */
+typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) {
+  RTCContinualGatheringPolicyGatherOnce,
+  RTCContinualGatheringPolicyGatherContinually
+};
+
+/** Represents the encryption key type. */
+typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) {
+  RTCEncryptionKeyTypeRSA,
+  RTCEncryptionKeyTypeECDSA,
+};
+
+/** Represents the chosen SDP semantics for the RTCPeerConnection. */
+typedef NS_ENUM(NSInteger, RTCSdpSemantics) {
+  // TODO(https://crbug.com/webrtc/13528): Remove support for Plan B.
+  RTCSdpSemanticsPlanB,
+  RTCSdpSemanticsUnifiedPlan,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject
+
+/** If true, allows DSCP codes to be set on outgoing packets, configured using
+ * networkPriority field of RTCRtpEncodingParameters. Defaults to false.
+ */
+@property(nonatomic, assign) BOOL enableDscp;
+
+/** An array of Ice Servers available to be used by ICE. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCIceServer) *> *iceServers;
+
+/** A pre-generated certificate to reuse, instead of generating a new one. */
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate;
+
+/** Which candidates the ICE agent is allowed to use. The W3C calls it
+ * `iceTransportPolicy`, while in C++ it is called `type`. */
+@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy;
+
+/** The media-bundling policy to use when gathering ICE candidates. */
+@property(nonatomic, assign) RTCBundlePolicy bundlePolicy;
+
+/** The rtcp-mux policy to use when gathering ICE candidates. */
+@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy;
+@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
+@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy;
+@property(nonatomic, assign) RTCContinualGatheringPolicy continualGatheringPolicy;
+
+/** If set to YES, don't gather IPv6 ICE candidates.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL disableIPV6;
+
+/** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi.
+ * Only intended to be used on specific devices. Certain phones disable IPv6
+ * when the screen is turned off and it would be better to just disable the
+ * IPv6 ICE candidates on Wi-Fi in those cases.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL disableIPV6OnWiFi;
+
+/** By default, the PeerConnection will use a limited number of IPv6 network
+ * interfaces, in order to avoid too many ICE candidate pairs being created
+ * and delaying ICE completion.
+ *
+ * Can be set to INT_MAX to effectively disable the limit.
+ */
+@property(nonatomic, assign) int maxIPv6Networks;
+
+/** Exclude link-local network interfaces
+ * from consideration for gathering ICE candidates.
+ * Defaults to NO.
+ */
+@property(nonatomic, assign) BOOL disableLinkLocalNetworks;
+
+@property(nonatomic, assign) int audioJitterBufferMaxPackets;
+@property(nonatomic, assign) BOOL audioJitterBufferFastAccelerate;
+@property(nonatomic, assign) int iceConnectionReceivingTimeout;
+@property(nonatomic, assign) int iceBackupCandidatePairPingInterval;
+
+/** Key type used to generate SSL identity. Default is ECDSA. */
+@property(nonatomic, assign) RTCEncryptionKeyType keyType;
+
+/** ICE candidate pool size as defined in JSEP. Default is 0. */
+@property(nonatomic, assign) int iceCandidatePoolSize;
+
+/** Prune turn ports on the same network to the same turn server.
+ * Default is NO.
+ */
+@property(nonatomic, assign) BOOL shouldPruneTurnPorts;
+
+/** If set to YES, this means the ICE transport should presume TURN-to-TURN
+ * candidate pairs will succeed, even before a binding response is received.
+ */
+@property(nonatomic, assign) BOOL shouldPresumeWritableWhenFullyRelayed;
+
+/* This flag is only effective when `continualGatheringPolicy` is
+ * RTCContinualGatheringPolicyGatherContinually.
+ *
+ * If YES, after the ICE transport type is changed such that new types of
+ * ICE candidates are allowed by the new transport type, e.g. from
+ * RTCIceTransportPolicyRelay to RTCIceTransportPolicyAll, candidates that
+ * have been gathered by the ICE transport but not matching the previous
+ * transport type and as a result not observed by PeerConnectionDelegateAdapter,
+ * will be surfaced to the delegate.
+ */
+@property(nonatomic, assign) BOOL shouldSurfaceIceCandidatesOnIceTransportTypeChanged;
+
+/** If set to non-nil, controls the minimal interval between consecutive ICE
+ * check packets.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckMinInterval;
+
+/**
+ * Configure the SDP semantics used by this PeerConnection. By default, this
+ * is RTCSdpSemanticsUnifiedPlan which is compliant to the WebRTC 1.0
+ * specification. It is possible to overwrite this to the deprecated
+ * RTCSdpSemanticsPlanB SDP format, but note that RTCSdpSemanticsPlanB will be
+ * deleted at some future date, see https://crbug.com/webrtc/13528.
+ *
+ * RTCSdpSemanticsUnifiedPlan will cause RTCPeerConnection to create offers and
+ * answers with multiple m= sections where each m= section maps to one
+ * RTCRtpSender and one RTCRtpReceiver (an RTCRtpTransceiver), either both audio
+ * or both video. This will also cause RTCPeerConnection to ignore all but the
+ * first a=ssrc lines that form a Plan B stream.
+ *
+ * RTCSdpSemanticsPlanB will cause RTCPeerConnection to create offers and
+ * answers with at most one audio and one video m= section with multiple
+ * RTCRtpSenders and RTCRtpReceivers specified as multiple a=ssrc lines within
+ * the section. This will also cause RTCPeerConnection to ignore all but the
+ * first m= section of the same media type.
+ */
+@property(nonatomic, assign) RTCSdpSemantics sdpSemantics;
+
+/** Actively reset the SRTP parameters when the DTLS transports underneath are
+ * changed after offer/answer negotiation. This is only intended to be a
+ * workaround for crbug.com/835958
+ */
+@property(nonatomic, assign) BOOL activeResetSrtpParams;
+
+/** If the remote side support mid-stream codec switches then allow encoder
+ * switching to be performed.
+ */
+
+@property(nonatomic, assign) BOOL allowCodecSwitching;
+
+/**
+ * Defines advanced optional cryptographic settings related to SRTP and
+ * frame encryption for native WebRTC. Setting this will overwrite any
+ * options set through the PeerConnectionFactory (which is deprecated).
+ */
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions;
+
+/**
+ * An optional string that will be attached to the TURN_ALLOCATE_REQUEST,
+ * which can be used to correlate client logs with backend logs.
+ */
+@property(nonatomic, nullable, copy) NSString *turnLoggingId;
+
+/**
+ * Time interval between audio RTCP reports.
+ */
+@property(nonatomic, assign) int rtcpAudioReportIntervalMs;
+
+/**
+ * Time interval between video RTCP reports.
+ */
+@property(nonatomic, assign) int rtcpVideoReportIntervalMs;
+
+/**
+ * Allow implicit rollback of local description when remote description
+ * conflicts with local description.
+ * See: https://w3c.github.io/webrtc-pc/#dom-peerconnection-setremotedescription
+ */
+@property(nonatomic, assign) BOOL enableImplicitRollback;
+
+/**
+ * Control if "a=extmap-allow-mixed" is included in the offer.
+ * See: https://www.chromestatus.com/feature/6269234631933952
+ */
+@property(nonatomic, assign) BOOL offerExtmapAllowMixed;
+
+/**
+ * Defines the interval applied to ALL candidate pairs
+ * when ICE is strongly connected, and it overrides the
+ * default value of this interval in the ICE implementation;
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckIntervalStrongConnectivity;
+
+/**
+ * Defines the counterpart for ALL pairs when ICE is
+ * weakly connected, and it overrides the default value of
+ * this interval in the ICE implementation
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceCheckIntervalWeakConnectivity;
+
+/**
+ * The min time period for which a candidate pair must wait for response to
+ * connectivity checks before it becomes unwritable. This parameter
+ * overrides the default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceUnwritableTimeout;
+
+/**
+ * The min number of connectivity checks that a candidate pair must sent
+ * without receiving response before it becomes unwritable. This parameter
+ * overrides the default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceUnwritableMinChecks;
+
+/**
+ * The min time period for which a candidate pair must wait for response to
+ * connectivity checks it becomes inactive. This parameter overrides the
+ * default value in the ICE implementation if set.
+ */
+@property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
new file mode 100644
index 0000000000..859aa8ad76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
@@ -0,0 +1,549 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCConfiguration+Private.h"
+
+#include <memory>
+
+#import "RTCCertificate.h"
+#import "RTCConfiguration+Native.h"
+#import "RTCIceServer+Private.h"
+#import "base/RTCLogging.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/ssl_identity.h"
+
+@implementation RTC_OBJC_TYPE (RTCConfiguration)
+
+// Backing ivars for every property declared in RTCConfiguration.h, plus
+// `turnCustomizer` from the RTCConfiguration+Native.h category.
+@synthesize enableDscp = _enableDscp;
+@synthesize iceServers = _iceServers;
+@synthesize certificate = _certificate;
+@synthesize iceTransportPolicy = _iceTransportPolicy;
+@synthesize bundlePolicy = _bundlePolicy;
+@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
+@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
+@synthesize continualGatheringPolicy = _continualGatheringPolicy;
+@synthesize disableIPV6 = _disableIPV6;
+@synthesize disableIPV6OnWiFi = _disableIPV6OnWiFi;
+@synthesize maxIPv6Networks = _maxIPv6Networks;
+@synthesize disableLinkLocalNetworks = _disableLinkLocalNetworks;
+@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
+@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
+@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
+@synthesize iceBackupCandidatePairPingInterval =
+    _iceBackupCandidatePairPingInterval;
+@synthesize keyType = _keyType;
+@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
+@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
+@synthesize shouldPresumeWritableWhenFullyRelayed =
+    _shouldPresumeWritableWhenFullyRelayed;
+@synthesize shouldSurfaceIceCandidatesOnIceTransportTypeChanged =
+    _shouldSurfaceIceCandidatesOnIceTransportTypeChanged;
+@synthesize iceCheckMinInterval = _iceCheckMinInterval;
+@synthesize sdpSemantics = _sdpSemantics;
+@synthesize turnCustomizer = _turnCustomizer;
+@synthesize activeResetSrtpParams = _activeResetSrtpParams;
+@synthesize allowCodecSwitching = _allowCodecSwitching;
+@synthesize cryptoOptions = _cryptoOptions;
+@synthesize turnLoggingId = _turnLoggingId;
+@synthesize rtcpAudioReportIntervalMs = _rtcpAudioReportIntervalMs;
+@synthesize rtcpVideoReportIntervalMs = _rtcpVideoReportIntervalMs;
+@synthesize enableImplicitRollback = _enableImplicitRollback;
+@synthesize offerExtmapAllowMixed = _offerExtmapAllowMixed;
+@synthesize iceCheckIntervalStrongConnectivity = _iceCheckIntervalStrongConnectivity;
+@synthesize iceCheckIntervalWeakConnectivity = _iceCheckIntervalWeakConnectivity;
+@synthesize iceUnwritableTimeout = _iceUnwritableTimeout;
+@synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks;
+@synthesize iceInactiveTimeout = _iceInactiveTimeout;
+
+// Builds a configuration pre-populated from WebRTC's native defaults, with
+// SDP semantics explicitly set to Unified Plan.
+- (instancetype)init {
+  // Copy defaults.
+  webrtc::PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+  return [self initWithNativeConfiguration:config];
+}
+
+// Designated initializer: copies every supported field out of the native
+// RTCConfiguration into the corresponding ObjC property.
+- (instancetype)initWithNativeConfiguration:
+    (const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
+  if (self = [super init]) {
+    _enableDscp = config.dscp();
+    NSMutableArray *iceServers = [NSMutableArray array];
+    for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
+      RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+          [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server];
+      [iceServers addObject:iceServer];
+    }
+    _iceServers = iceServers;
+    // Only the first native certificate (if any) is surfaced to ObjC,
+    // round-tripped through its PEM representation.
+    if (!config.certificates.empty()) {
+      rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
+      native_cert = config.certificates[0];
+      rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
+      _certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc]
+          initWithPrivateKey:@(native_pem.private_key().c_str())
+                 certificate:@(native_pem.certificate().c_str())];
+    }
+    _iceTransportPolicy =
+        [[self class] transportPolicyForTransportsType:config.type];
+    _bundlePolicy =
+        [[self class] bundlePolicyForNativePolicy:config.bundle_policy];
+    _rtcpMuxPolicy =
+        [[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
+    _tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
+                                            config.tcp_candidate_policy];
+    _candidateNetworkPolicy = [[self class]
+        candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
+    webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
+        config.continual_gathering_policy;
+    _continualGatheringPolicy =
+        [[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
+    _disableIPV6 = config.disable_ipv6;
+    _disableIPV6OnWiFi = config.disable_ipv6_on_wifi;
+    _maxIPv6Networks = config.max_ipv6_networks;
+    _disableLinkLocalNetworks = config.disable_link_local_networks;
+    _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
+    _audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
+    _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
+    _iceBackupCandidatePairPingInterval =
+        config.ice_backup_candidate_pair_ping_interval;
+    // Key type is not carried by the native config; default to ECDSA.
+    _keyType = RTCEncryptionKeyTypeECDSA;
+    _iceCandidatePoolSize = config.ice_candidate_pool_size;
+    _shouldPruneTurnPorts = config.prune_turn_ports;
+    _shouldPresumeWritableWhenFullyRelayed =
+        config.presume_writable_when_fully_relayed;
+    _shouldSurfaceIceCandidatesOnIceTransportTypeChanged =
+        config.surface_ice_candidates_on_ice_transport_type_changed;
+    if (config.ice_check_min_interval) {
+      _iceCheckMinInterval =
+          [NSNumber numberWithInt:*config.ice_check_min_interval];
+    }
+    _sdpSemantics = [[self class] sdpSemanticsForNativeSdpSemantics:config.sdp_semantics];
+    _turnCustomizer = config.turn_customizer;
+    _activeResetSrtpParams = config.active_reset_srtp_params;
+    if (config.crypto_options) {
+      _cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
+               initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp
+                                                     .enable_gcm_crypto_suites
+             srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp
+                                                     .enable_aes128_sha1_32_crypto_cipher
+          srtpEnableEncryptedRtpHeaderExtensions:config.crypto_options->srtp
+                                                     .enable_encrypted_rtp_header_extensions
+                    sframeRequireFrameEncryption:config.crypto_options->sframe
+                                                     .require_frame_encryption];
+    }
+    _turnLoggingId = [NSString stringWithUTF8String:config.turn_logging_id.c_str()];
+    _rtcpAudioReportIntervalMs = config.audio_rtcp_report_interval_ms();
+    _rtcpVideoReportIntervalMs = config.video_rtcp_report_interval_ms();
+    _allowCodecSwitching = config.allow_codec_switching.value_or(false);
+    _enableImplicitRollback = config.enable_implicit_rollback;
+    _offerExtmapAllowMixed = config.offer_extmap_allow_mixed;
+    // absl::optional fields map to nullable NSNumbers (nil when unset).
+    _iceCheckIntervalStrongConnectivity =
+        config.ice_check_interval_strong_connectivity.has_value() ?
+        [NSNumber numberWithInt:*config.ice_check_interval_strong_connectivity] :
+        nil;
+    _iceCheckIntervalWeakConnectivity = config.ice_check_interval_weak_connectivity.has_value() ?
+        [NSNumber numberWithInt:*config.ice_check_interval_weak_connectivity] :
+        nil;
+    _iceUnwritableTimeout = config.ice_unwritable_timeout.has_value() ?
+        [NSNumber numberWithInt:*config.ice_unwritable_timeout] :
+        nil;
+    _iceUnwritableMinChecks = config.ice_unwritable_min_checks.has_value() ?
+        [NSNumber numberWithInt:*config.ice_unwritable_min_checks] :
+        nil;
+    _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ?
+        [NSNumber numberWithInt:*config.ice_inactive_timeout] :
+        nil;
+  }
+  return self;
+}
+
+// Human-readable dump of the configuration for logging.
+//
+// BUG FIX: the format string previously contained 23 specifiers for 24
+// variadic arguments — `_shouldSurfaceIceCandidatesOnIceTransportTypeChanged`
+// (a BOOL) was consumed by a %@ and `_iceCheckMinInterval` (an NSNumber *) by
+// a %d, shifting every later argument by one (undefined behavior in the
+// variadic call). The corrected layout is: 8 x %@ (object-valued fields),
+// 8 x %d (scalars/BOOLs through the surface-ICE-candidates flag), 1 x %@
+// (nullable iceCheckMinInterval), then 7 x %d (remaining scalars/BOOLs).
+- (NSString *)description {
+  static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
+                                  @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
+                                  @"%d\n%d\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n}\n";
+
+  return [NSString
+      stringWithFormat:formatString,
+                       _iceServers,
+                       [[self class] stringForTransportPolicy:_iceTransportPolicy],
+                       [[self class] stringForBundlePolicy:_bundlePolicy],
+                       [[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
+                       [[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
+                       [[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
+                       [[self class] stringForContinualGatheringPolicy:_continualGatheringPolicy],
+                       [[self class] stringForSdpSemantics:_sdpSemantics],
+                       _audioJitterBufferMaxPackets,
+                       _audioJitterBufferFastAccelerate,
+                       _iceConnectionReceivingTimeout,
+                       _iceBackupCandidatePairPingInterval,
+                       _iceCandidatePoolSize,
+                       _shouldPruneTurnPorts,
+                       _shouldPresumeWritableWhenFullyRelayed,
+                       _shouldSurfaceIceCandidatesOnIceTransportTypeChanged,
+                       _iceCheckMinInterval,
+                       _disableLinkLocalNetworks,
+                       _disableIPV6,
+                       _disableIPV6OnWiFi,
+                       _maxIPv6Networks,
+                       _activeResetSrtpParams,
+                       _enableDscp,
+                       _enableImplicitRollback];
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::RTCConfiguration *)
+ createNativeConfiguration {
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
+ webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
+
+ nativeConfig->set_dscp(_enableDscp);
+ for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) {
+ nativeConfig->servers.push_back(iceServer.nativeServer);
+ }
+ nativeConfig->type =
+ [[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
+ nativeConfig->bundle_policy =
+ [[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
+ nativeConfig->rtcp_mux_policy =
+ [[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
+ nativeConfig->tcp_candidate_policy =
+ [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
+ nativeConfig->candidate_network_policy = [[self class]
+ nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
+ nativeConfig->continual_gathering_policy = [[self class]
+ nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
+ nativeConfig->disable_ipv6 = _disableIPV6;
+ nativeConfig->disable_ipv6_on_wifi = _disableIPV6OnWiFi;
+ nativeConfig->max_ipv6_networks = _maxIPv6Networks;
+ nativeConfig->disable_link_local_networks = _disableLinkLocalNetworks;
+ nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+ nativeConfig->audio_jitter_buffer_fast_accelerate =
+ _audioJitterBufferFastAccelerate ? true : false;
+ nativeConfig->ice_connection_receiving_timeout =
+ _iceConnectionReceivingTimeout;
+ nativeConfig->ice_backup_candidate_pair_ping_interval =
+ _iceBackupCandidatePairPingInterval;
+ rtc::KeyType keyType =
+ [[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
+ if (_certificate != nullptr) {
+ // if offered a pemcert use it...
+ RTC_LOG(LS_INFO) << "Have configured cert - using it.";
+ std::string pem_private_key = [[_certificate private_key] UTF8String];
+ std::string pem_certificate = [[_certificate certificate] UTF8String];
+ rtc::RTCCertificatePEM pem = rtc::RTCCertificatePEM(pem_private_key, pem_certificate);
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::RTCCertificate::FromPEM(pem);
+ RTC_LOG(LS_INFO) << "Created cert from PEM strings.";
+ if (!certificate) {
+ RTC_LOG(LS_ERROR) << "Failed to generate certificate from PEM.";
+ return nullptr;
+ }
+ nativeConfig->certificates.push_back(certificate);
+ } else {
+ RTC_LOG(LS_INFO) << "Don't have configured cert.";
+ // Generate non-default certificate.
+ if (keyType != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+ absl::optional<uint64_t>());
+ if (!certificate) {
+ RTCLogError(@"Failed to generate certificate.");
+ return nullptr;
+ }
+ nativeConfig->certificates.push_back(certificate);
+ }
+ }
+ nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
+ nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
+ nativeConfig->presume_writable_when_fully_relayed =
+ _shouldPresumeWritableWhenFullyRelayed ? true : false;
+ nativeConfig->surface_ice_candidates_on_ice_transport_type_changed =
+ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged ? true : false;
+ if (_iceCheckMinInterval != nil) {
+ nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
+ }
+ nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
+ if (_turnCustomizer) {
+ nativeConfig->turn_customizer = _turnCustomizer;
+ }
+ nativeConfig->active_reset_srtp_params = _activeResetSrtpParams ? true : false;
+ if (_cryptoOptions) {
+ webrtc::CryptoOptions nativeCryptoOptions;
+ nativeCryptoOptions.srtp.enable_gcm_crypto_suites =
+ _cryptoOptions.srtpEnableGcmCryptoSuites ? true : false;
+ nativeCryptoOptions.srtp.enable_aes128_sha1_32_crypto_cipher =
+ _cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher ? true : false;
+ nativeCryptoOptions.srtp.enable_encrypted_rtp_header_extensions =
+ _cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions ? true : false;
+ nativeCryptoOptions.sframe.require_frame_encryption =
+ _cryptoOptions.sframeRequireFrameEncryption ? true : false;
+ nativeConfig->crypto_options = absl::optional<webrtc::CryptoOptions>(nativeCryptoOptions);
+ }
+ nativeConfig->turn_logging_id = [_turnLoggingId UTF8String];
+ nativeConfig->set_audio_rtcp_report_interval_ms(_rtcpAudioReportIntervalMs);
+ nativeConfig->set_video_rtcp_report_interval_ms(_rtcpVideoReportIntervalMs);
+ nativeConfig->allow_codec_switching = _allowCodecSwitching;
+ nativeConfig->enable_implicit_rollback = _enableImplicitRollback;
+ nativeConfig->offer_extmap_allow_mixed = _offerExtmapAllowMixed;
+ if (_iceCheckIntervalStrongConnectivity != nil) {
+ nativeConfig->ice_check_interval_strong_connectivity =
+ absl::optional<int>(_iceCheckIntervalStrongConnectivity.intValue);
+ }
+ if (_iceCheckIntervalWeakConnectivity != nil) {
+ nativeConfig->ice_check_interval_weak_connectivity =
+ absl::optional<int>(_iceCheckIntervalWeakConnectivity.intValue);
+ }
+ if (_iceUnwritableTimeout != nil) {
+ nativeConfig->ice_unwritable_timeout = absl::optional<int>(_iceUnwritableTimeout.intValue);
+ }
+ if (_iceUnwritableMinChecks != nil) {
+ nativeConfig->ice_unwritable_min_checks = absl::optional<int>(_iceUnwritableMinChecks.intValue);
+ }
+ if (_iceInactiveTimeout != nil) {
+ nativeConfig->ice_inactive_timeout = absl::optional<int>(_iceInactiveTimeout.intValue);
+ }
+ return nativeConfig.release();
+}
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)
+ nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
+ switch (policy) {
+ case RTCIceTransportPolicyNone:
+ return webrtc::PeerConnectionInterface::kNone;
+ case RTCIceTransportPolicyRelay:
+ return webrtc::PeerConnectionInterface::kRelay;
+ case RTCIceTransportPolicyNoHost:
+ return webrtc::PeerConnectionInterface::kNoHost;
+ case RTCIceTransportPolicyAll:
+ return webrtc::PeerConnectionInterface::kAll;
+ }
+}
+
++ (RTCIceTransportPolicy)transportPolicyForTransportsType:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
+ switch (nativeType) {
+ case webrtc::PeerConnectionInterface::kNone:
+ return RTCIceTransportPolicyNone;
+ case webrtc::PeerConnectionInterface::kRelay:
+ return RTCIceTransportPolicyRelay;
+ case webrtc::PeerConnectionInterface::kNoHost:
+ return RTCIceTransportPolicyNoHost;
+ case webrtc::PeerConnectionInterface::kAll:
+ return RTCIceTransportPolicyAll;
+ }
+}
+
++ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
+ switch (policy) {
+ case RTCIceTransportPolicyNone:
+ return @"NONE";
+ case RTCIceTransportPolicyRelay:
+ return @"RELAY";
+ case RTCIceTransportPolicyNoHost:
+ return @"NO_HOST";
+ case RTCIceTransportPolicyAll:
+ return @"ALL";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
+ (RTCBundlePolicy)policy {
+ switch (policy) {
+ case RTCBundlePolicyBalanced:
+ return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
+ case RTCBundlePolicyMaxCompat:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
+ case RTCBundlePolicyMaxBundle:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
+ }
+}
+
++ (RTCBundlePolicy)bundlePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
+ return RTCBundlePolicyBalanced;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
+ return RTCBundlePolicyMaxCompat;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
+ return RTCBundlePolicyMaxBundle;
+ }
+}
+
++ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
+ switch (policy) {
+ case RTCBundlePolicyBalanced:
+ return @"BALANCED";
+ case RTCBundlePolicyMaxCompat:
+ return @"MAX_COMPAT";
+ case RTCBundlePolicyMaxBundle:
+ return @"MAX_BUNDLE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
+ (RTCRtcpMuxPolicy)policy {
+ switch (policy) {
+ case RTCRtcpMuxPolicyNegotiate:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ case RTCRtcpMuxPolicyRequire:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ }
+}
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
+ return RTCRtcpMuxPolicyNegotiate;
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
+ return RTCRtcpMuxPolicyRequire;
+ }
+}
+
++ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
+ switch (policy) {
+ case RTCRtcpMuxPolicyNegotiate:
+ return @"NEGOTIATE";
+ case RTCRtcpMuxPolicyRequire:
+ return @"REQUIRE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
+ nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
+ switch (policy) {
+ case RTCTcpCandidatePolicyEnabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+ case RTCTcpCandidatePolicyDisabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
+ nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy {
+ switch (policy) {
+ case RTCCandidateNetworkPolicyAll:
+ return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
+ case RTCCandidateNetworkPolicyLowCost:
+ return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+ }
+}
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
+ return RTCTcpCandidatePolicyEnabled;
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
+ return RTCTcpCandidatePolicyDisabled;
+ }
+}
+
++ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
+ switch (policy) {
+ case RTCTcpCandidatePolicyEnabled:
+ return @"TCP_ENABLED";
+ case RTCTcpCandidatePolicyDisabled:
+ return @"TCP_DISABLED";
+ }
+}
+
++ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
+ return RTCCandidateNetworkPolicyAll;
+ case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
+ return RTCCandidateNetworkPolicyLowCost;
+ }
+}
+
++ (NSString *)stringForCandidateNetworkPolicy:
+ (RTCCandidateNetworkPolicy)policy {
+ switch (policy) {
+ case RTCCandidateNetworkPolicyAll:
+ return @"CANDIDATE_ALL_NETWORKS";
+ case RTCCandidateNetworkPolicyLowCost:
+ return @"CANDIDATE_LOW_COST_NETWORKS";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
+ nativeContinualGatheringPolicyForPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return webrtc::PeerConnectionInterface::GATHER_ONCE;
+ case RTCContinualGatheringPolicyGatherContinually:
+ return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
+ }
+}
+
++ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::GATHER_ONCE:
+ return RTCContinualGatheringPolicyGatherOnce;
+ case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
+ return RTCContinualGatheringPolicyGatherContinually;
+ }
+}
+
++ (NSString *)stringForContinualGatheringPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return @"GATHER_ONCE";
+ case RTCContinualGatheringPolicyGatherContinually:
+ return @"GATHER_CONTINUALLY";
+ }
+}
+
++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
+ (RTCEncryptionKeyType)keyType {
+ switch (keyType) {
+ case RTCEncryptionKeyTypeRSA:
+ return rtc::KT_RSA;
+ case RTCEncryptionKeyTypeECDSA:
+ return rtc::KT_ECDSA;
+ }
+}
+
++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case RTCSdpSemanticsPlanB:
+ return webrtc::SdpSemantics::kPlanB_DEPRECATED;
+ case RTCSdpSemanticsUnifiedPlan:
+ return webrtc::SdpSemantics::kUnifiedPlan;
+ }
+}
+
++ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case webrtc::SdpSemantics::kPlanB_DEPRECATED:
+ return RTCSdpSemanticsPlanB;
+ case webrtc::SdpSemantics::kUnifiedPlan:
+ return RTCSdpSemanticsUnifiedPlan;
+ }
+}
+
++ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
+ switch (sdpSemantics) {
+ case RTCSdpSemanticsPlanB:
+ return @"PLAN_B";
+ case RTCSdpSemanticsUnifiedPlan:
+ return @"UNIFIED_PLAN";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h
new file mode 100644
index 0000000000..7894c8d50c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Objective-C bindings for webrtc::CryptoOptions. This API had to be flattened
+ * as Objective-C doesn't support nested structures.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject
+
+/**
+ * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+ * if both sides enable it
+ */
+@property(nonatomic, assign) BOOL srtpEnableGcmCryptoSuites;
+/**
+ * If set to true, the (potentially insecure) crypto cipher
+ * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
+ * during negotiation. It will only be used if both peers support it and no
+ * other ciphers get preferred.
+ */
+@property(nonatomic, assign) BOOL srtpEnableAes128Sha1_32CryptoCipher;
+/**
+ * If set to true, encrypted RTP header extensions as defined in RFC 6904
+ * will be negotiated. They will only be used if both peers support them.
+ */
+@property(nonatomic, assign) BOOL srtpEnableEncryptedRtpHeaderExtensions;
+
+/**
+ * If set all RtpSenders must have an FrameEncryptor attached to them before
+ * they are allowed to send packets. All RtpReceivers must have a
+ * FrameDecryptor attached to them before they are able to receive packets.
+ */
+@property(nonatomic, assign) BOOL sframeRequireFrameEncryption;
+
+/**
+ * Initializes CryptoOptions with all possible options set explicitly. This
+ * is done when converting from a native RTCConfiguration.crypto_options.
+ */
+- (instancetype)initWithSrtpEnableGcmCryptoSuites:(BOOL)srtpEnableGcmCryptoSuites
+ srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher
+ srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions
+ sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
new file mode 100644
index 0000000000..fbaa1de58d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCryptoOptions.h"
+
+@implementation RTC_OBJC_TYPE (RTCCryptoOptions)
+
+@synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites;
+@synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher;
+@synthesize srtpEnableEncryptedRtpHeaderExtensions = _srtpEnableEncryptedRtpHeaderExtensions;
+@synthesize sframeRequireFrameEncryption = _sframeRequireFrameEncryption;
+
+- (instancetype)initWithSrtpEnableGcmCryptoSuites:(BOOL)srtpEnableGcmCryptoSuites
+ srtpEnableAes128Sha1_32CryptoCipher:(BOOL)srtpEnableAes128Sha1_32CryptoCipher
+ srtpEnableEncryptedRtpHeaderExtensions:(BOOL)srtpEnableEncryptedRtpHeaderExtensions
+ sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption {
+ if (self = [super init]) {
+ _srtpEnableGcmCryptoSuites = srtpEnableGcmCryptoSuites;
+ _srtpEnableAes128Sha1_32CryptoCipher = srtpEnableAes128Sha1_32CryptoCipher;
+ _srtpEnableEncryptedRtpHeaderExtensions = srtpEnableEncryptedRtpHeaderExtensions;
+ _sframeRequireFrameEncryption = sframeRequireFrameEncryption;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
new file mode 100644
index 0000000000..2cdbdabec6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannel.h"
+
+#include "api/data_channel_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCDataBuffer)
+()
+
+ /**
+ * The native DataBuffer representation of this RTCDatabuffer object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
+
+/** Initialize an RTCDataBuffer from a native DataBuffer. */
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCDataChannel)
+()
+
+ /** Initialize an RTCDataChannel from a native DataChannelInterface. */
+ - (instancetype)initWithFactory
+ : (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel
+ : (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
+ (RTCDataChannelState)state;
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+ (webrtc::DataChannelInterface::DataState)nativeState;
+
++ (NSString *)stringForState:(RTCDataChannelState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h
new file mode 100644
index 0000000000..89eb58bc3f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AvailabilityMacros.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject
+
+/** NSData representation of the underlying buffer. */
+@property(nonatomic, readonly) NSData *data;
+
+/** Indicates whether `data` contains UTF-8 or binary data. */
+@property(nonatomic, readonly) BOOL isBinary;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Initialize an RTCDataBuffer from NSData. `isBinary` indicates whether `data`
+ * contains UTF-8 or binary data.
+ */
+- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary;
+
+@end
+
+@class RTC_OBJC_TYPE(RTCDataChannel);
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCDataChannelDelegate)<NSObject>
+
+ /** The data channel state changed. */
+ - (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
+
+/** The data channel successfully received a data buffer. */
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+ didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer;
+
+@optional
+/** The data channel's `bufferedAmount` changed. */
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+ didChangeBufferedAmount:(uint64_t)amount;
+
+@end
+
+/** Represents the state of the data channel. */
+typedef NS_ENUM(NSInteger, RTCDataChannelState) {
+ RTCDataChannelStateConnecting,
+ RTCDataChannelStateOpen,
+ RTCDataChannelStateClosing,
+ RTCDataChannelStateClosed,
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject
+
+/**
+ * A label that can be used to distinguish this data channel from other data
+ * channel objects.
+ */
+@property(nonatomic, readonly) NSString *label;
+
+/** Whether the data channel can send messages in unreliable mode. */
+@property(nonatomic, readonly) BOOL isReliable DEPRECATED_ATTRIBUTE;
+
+/** Returns whether this data channel is ordered or not. */
+@property(nonatomic, readonly) BOOL isOrdered;
+
+/** Deprecated. Use maxPacketLifeTime. */
+@property(nonatomic, readonly) NSUInteger maxRetransmitTime DEPRECATED_ATTRIBUTE;
+
+/**
+ * The length of the time window (in milliseconds) during which transmissions
+ * and retransmissions may occur in unreliable mode.
+ */
+@property(nonatomic, readonly) uint16_t maxPacketLifeTime;
+
+/**
+ * The maximum number of retransmissions that are attempted in unreliable mode.
+ */
+@property(nonatomic, readonly) uint16_t maxRetransmits;
+
+/**
+ * The name of the sub-protocol used with this data channel, if any. Otherwise
+ * this returns an empty string.
+ */
+@property(nonatomic, readonly) NSString *protocol;
+
+/**
+ * Returns whether this data channel was negotiated by the application or not.
+ */
+@property(nonatomic, readonly) BOOL isNegotiated;
+
+/** Deprecated. Use channelId. */
+@property(nonatomic, readonly) NSInteger streamId DEPRECATED_ATTRIBUTE;
+
+/** The identifier for this data channel. */
+@property(nonatomic, readonly) int channelId;
+
+/** The state of the data channel. */
+@property(nonatomic, readonly) RTCDataChannelState readyState;
+
+/**
+ * The number of bytes of application data that have been queued using
+ * `sendData:` but that have not yet been transmitted to the network.
+ */
+@property(nonatomic, readonly) uint64_t bufferedAmount;
+
+/** The delegate for this data channel. */
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Closes the data channel. */
+- (void)close;
+
+/** Attempt to send `data` on this data channel's underlying data transport. */
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm
new file mode 100644
index 0000000000..4a79cefdb4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannel.mm
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannel+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+namespace webrtc {
+
+class DataChannelDelegateAdapter : public DataChannelObserver {
+ public:
+ DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; }
+
+ void OnStateChange() override {
+ [channel_.delegate dataChannelDidChangeState:channel_];
+ }
+
+ void OnMessage(const DataBuffer& buffer) override {
+ RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer =
+ [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer];
+ [channel_.delegate dataChannel:channel_
+ didReceiveMessageWithBuffer:data_buffer];
+ }
+
+ void OnBufferedAmountChange(uint64_t previousAmount) override {
+ id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate = channel_.delegate;
+ SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
+ if ([delegate respondsToSelector:sel]) {
+ [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
+ }
+ }
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCDataChannel) * channel_;
+};
+}
+
+@implementation RTC_OBJC_TYPE (RTCDataBuffer) {
+ std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
+}
+
+- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
+ NSParameterAssert(data);
+ if (self = [super init]) {
+ rtc::CopyOnWriteBuffer buffer(
+ reinterpret_cast<const uint8_t*>(data.bytes), data.length);
+ _dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
+ }
+ return self;
+}
+
+- (NSData *)data {
+ return [NSData dataWithBytes:_dataBuffer->data.data()
+ length:_dataBuffer->data.size()];
+}
+
+- (BOOL)isBinary {
+ return _dataBuffer->binary;
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
+ if (self = [super init]) {
+ _dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
+ }
+ return self;
+}
+
+- (const webrtc::DataBuffer *)nativeDataBuffer {
+ return _dataBuffer.get();
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCDataChannel) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
+ std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
+ BOOL _isObserverRegistered;
+}
+
+@synthesize delegate = _delegate;
+
+- (void)dealloc {
+ // Handles unregistering the observer properly. We need to do this because
+ // there may still be other references to the underlying data channel.
+ _nativeDataChannel->UnregisterObserver();
+}
+
+- (NSString *)label {
+ return [NSString stringForStdString:_nativeDataChannel->label()];
+}
+
+- (BOOL)isReliable {
+ return _nativeDataChannel->reliable();
+}
+
+- (BOOL)isOrdered {
+ return _nativeDataChannel->ordered();
+}
+
+- (NSUInteger)maxRetransmitTime {
+ return self.maxPacketLifeTime;
+}
+
+- (uint16_t)maxPacketLifeTime {
+ return _nativeDataChannel->maxRetransmitTime();
+}
+
+- (uint16_t)maxRetransmits {
+ return _nativeDataChannel->maxRetransmits();
+}
+
+- (NSString *)protocol {
+ return [NSString stringForStdString:_nativeDataChannel->protocol()];
+}
+
+- (BOOL)isNegotiated {
+ return _nativeDataChannel->negotiated();
+}
+
+- (NSInteger)streamId {
+ return self.channelId;
+}
+
+- (int)channelId {
+ return _nativeDataChannel->id();
+}
+
+- (RTCDataChannelState)readyState {
+ return [[self class] dataChannelStateForNativeState:
+ _nativeDataChannel->state()];
+}
+
+- (uint64_t)bufferedAmount {
+ return _nativeDataChannel->buffered_amount();
+}
+
+- (void)close {
+ _nativeDataChannel->Close();
+}
+
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data {
+ return _nativeDataChannel->Send(*data.nativeDataBuffer);
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@",
+ (long)self.channelId,
+ self.label,
+ [[self class] stringForState:self.readyState]];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeDataChannel:
+ (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
+ NSParameterAssert(nativeDataChannel);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeDataChannel = nativeDataChannel;
+ _observer.reset(new webrtc::DataChannelDelegateAdapter(self));
+ _nativeDataChannel->RegisterObserver(_observer.get());
+ }
+ return self;
+}
+
++ (webrtc::DataChannelInterface::DataState)
+ nativeDataChannelStateForState:(RTCDataChannelState)state {
+ switch (state) {
+ case RTCDataChannelStateConnecting:
+ return webrtc::DataChannelInterface::DataState::kConnecting;
+ case RTCDataChannelStateOpen:
+ return webrtc::DataChannelInterface::DataState::kOpen;
+ case RTCDataChannelStateClosing:
+ return webrtc::DataChannelInterface::DataState::kClosing;
+ case RTCDataChannelStateClosed:
+ return webrtc::DataChannelInterface::DataState::kClosed;
+ }
+}
+
++ (RTCDataChannelState)dataChannelStateForNativeState:
+ (webrtc::DataChannelInterface::DataState)nativeState {
+ switch (nativeState) {
+ case webrtc::DataChannelInterface::DataState::kConnecting:
+ return RTCDataChannelStateConnecting;
+ case webrtc::DataChannelInterface::DataState::kOpen:
+ return RTCDataChannelStateOpen;
+ case webrtc::DataChannelInterface::DataState::kClosing:
+ return RTCDataChannelStateClosing;
+ case webrtc::DataChannelInterface::DataState::kClosed:
+ return RTCDataChannelStateClosed;
+ }
+}
+
++ (NSString *)stringForState:(RTCDataChannelState)state {
+ switch (state) {
+ case RTCDataChannelStateConnecting:
+ return @"Connecting";
+ case RTCDataChannelStateOpen:
+ return @"Open";
+ case RTCDataChannelStateClosing:
+ return @"Closing";
+ case RTCDataChannelStateClosed:
+ return @"Closed";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
new file mode 100644
index 0000000000..5aef10fcef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannelConfiguration.h"
+
+#include "api/data_channel_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+()
+
+ @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
new file mode 100644
index 0000000000..9459ae0a13
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AvailabilityMacros.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject
+
+/** Set to YES if ordered delivery is required. */
+@property(nonatomic, assign) BOOL isOrdered;
+
+/** Deprecated. Use maxPacketLifeTime. */
+@property(nonatomic, assign) NSInteger maxRetransmitTimeMs DEPRECATED_ATTRIBUTE;
+
+/**
+ * Max period in milliseconds in which retransmissions will be sent. After this
+ * time, no more retransmissions will be sent. -1 if unset.
+ */
+@property(nonatomic, assign) int maxPacketLifeTime;
+
+/** The max number of retransmissions. -1 if unset. */
+@property(nonatomic, assign) int maxRetransmits;
+
+/** Set to YES if the channel has been externally negotiated and we do not send
+ * an in-band signalling in the form of an "open" message.
+ */
+@property(nonatomic, assign) BOOL isNegotiated;
+
+/** Deprecated. Use channelId. */
+@property(nonatomic, assign) int streamId DEPRECATED_ATTRIBUTE;
+
+/** The id of the data channel. */
+@property(nonatomic, assign) int channelId;
+
+/** Set by the application and opaque to the WebRTC implementation. */
+@property(nonatomic) NSString* protocol;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
new file mode 100644
index 0000000000..bf775b1afd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDataChannelConfiguration+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+
+@synthesize nativeDataChannelInit = _nativeDataChannelInit;
+
+- (BOOL)isOrdered {
+ return _nativeDataChannelInit.ordered;
+}
+
+- (void)setIsOrdered:(BOOL)isOrdered {
+ _nativeDataChannelInit.ordered = isOrdered;
+}
+
+- (NSInteger)maxRetransmitTimeMs {
+ return self.maxPacketLifeTime;
+}
+
+- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
+ self.maxPacketLifeTime = maxRetransmitTimeMs;
+}
+
+- (int)maxPacketLifeTime {
+ return *_nativeDataChannelInit.maxRetransmitTime;
+}
+
+- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
+ _nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
+}
+
+- (int)maxRetransmits {
+ if (_nativeDataChannelInit.maxRetransmits) {
+ return *_nativeDataChannelInit.maxRetransmits;
+ } else {
+ return -1;
+ }
+}
+
+- (void)setMaxRetransmits:(int)maxRetransmits {
+ _nativeDataChannelInit.maxRetransmits = maxRetransmits;
+}
+
+- (NSString *)protocol {
+ return [NSString stringForStdString:_nativeDataChannelInit.protocol];
+}
+
+- (void)setProtocol:(NSString *)protocol {
+ _nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
+}
+
+- (BOOL)isNegotiated {
+ return _nativeDataChannelInit.negotiated;
+}
+
+- (void)setIsNegotiated:(BOOL)isNegotiated {
+ _nativeDataChannelInit.negotiated = isNegotiated;
+}
+
+- (int)streamId {
+ return self.channelId;
+}
+
+- (void)setStreamId:(int)streamId {
+ self.channelId = streamId;
+}
+
+- (int)channelId {
+ return _nativeDataChannelInit.id;
+}
+
+- (void)setChannelId:(int)channelId {
+ _nativeDataChannelInit.id = channelId;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
new file mode 100644
index 0000000000..49a62164cd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDtmfSender.h"
+
+#include "api/dtmf_sender_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject <RTC_OBJC_TYPE(RTCDtmfSender)>
+
+/** The native DtmfSenderInterface this object wraps; used to reach the C++ API. */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize an RTCDtmfSender with a native DtmfSenderInterface. */
+- (instancetype)initWithNativeDtmfSender:
+ (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h
new file mode 100644
index 0000000000..0f1b6ba4da
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCDtmfSender)<NSObject>
+
+ /**
+ * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise
+ * returns false. To be able to send DTMF, the associated RTCRtpSender must be
+ * able to send packets, and a "telephone-event" codec must be negotiated.
+ */
+ @property(nonatomic, readonly) BOOL canInsertDtmf;
+
+/**
+ * Queues a task that sends the DTMF tones. The tones parameter is treated
+ * as a series of characters. The characters 0 through 9, A through D, #, and *
+ * generate the associated DTMF tones. The characters a to d are equivalent
+ * to A to D. The character ',' indicates a delay of 2 seconds before
+ * processing the next character in the tones parameter.
+ *
+ * Unrecognized characters are ignored.
+ *
+ * @param duration The parameter indicates the duration to use for each
+ * character passed in the tones parameter. The duration cannot be more
+ * than 6000 or less than 70 ms.
+ *
+ * @param interToneGap The parameter indicates the gap between tones.
+ * This parameter must be at least 50 ms but should be as short as
+ * possible.
+ *
+ * NOTE(review): both parameters are NSTimeInterval values expressed in
+ * seconds; the implementation multiplies them by 1000 before passing them
+ * on, so the millisecond bounds above correspond to 0.07 - 6.0 here.
+ * Confirm the intended units with callers.
+ *
+ * If InsertDtmf is called on the same object while an existing task for this
+ * object to generate DTMF is still running, the previous task is canceled.
+ * Returns true on success and false on failure.
+ */
+- (BOOL)insertDtmf:(nonnull NSString *)tones
+ duration:(NSTimeInterval)duration
+ interToneGap:(NSTimeInterval)interToneGap;
+
+/** The tones remaining to be played out */
+- (nonnull NSString *)remainingTones;
+
+/**
+ * The current tone duration value. This value will be the value last set via the
+ * insertDtmf method, or the default value of 100 ms if insertDtmf was never called.
+ */
+- (NSTimeInterval)duration;
+
+/**
+ * The current value of the between-tone gap. This value will be the value last set
+ * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+ * called.
+ */
+- (NSTimeInterval)interToneGap;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm
new file mode 100644
index 0000000000..ee3b79cd37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCDtmfSender.mm
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDtmfSender+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/time_utils.h"
+
+// Thin proxy over the native webrtc::DtmfSenderInterface; every accessor
+// reads through to `_nativeDtmfSender`, which is set once in the designated
+// initializer and assumed non-null thereafter.
+@implementation RTC_OBJC_TYPE (RTCDtmfSender) {
+ rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
+}
+
+- (BOOL)canInsertDtmf {
+ return _nativeDtmfSender->CanInsertDtmf();
+}
+
+- (BOOL)insertDtmf:(nonnull NSString *)tones
+ duration:(NSTimeInterval)duration
+ interToneGap:(NSTimeInterval)interToneGap {
+ RTC_DCHECK(tones != nil);
+
+ // Convert the NSTimeInterval inputs (seconds) into the integer millisecond
+ // values the native InsertDtmf call takes.
+ int durationMs = static_cast<int>(duration * rtc::kNumMillisecsPerSec);
+ int interToneGapMs = static_cast<int>(interToneGap * rtc::kNumMillisecsPerSec);
+ return _nativeDtmfSender->InsertDtmf(
+ [NSString stdStringForString:tones], durationMs, interToneGapMs);
+}
+
+- (nonnull NSString *)remainingTones {
+ return [NSString stringForStdString:_nativeDtmfSender->tones()];
+}
+
+- (NSTimeInterval)duration {
+ // Native value is in milliseconds; convert back to seconds.
+ return static_cast<NSTimeInterval>(_nativeDtmfSender->duration()) / rtc::kNumMillisecsPerSec;
+}
+
+- (NSTimeInterval)interToneGap {
+ // Native value is in milliseconds; convert back to seconds.
+ return static_cast<NSTimeInterval>(_nativeDtmfSender->inter_tone_gap()) /
+ rtc::kNumMillisecsPerSec;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n remainingTones: %@\n "
+ @"duration: %f sec\n interToneGap: %f sec\n}",
+ [self remainingTones],
+ [self duration],
+ [self interToneGap]];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
+ return _nativeDtmfSender;
+}
+
+// Designated initializer; logs creation of the wrapper for debugging.
+- (instancetype)initWithNativeDtmfSender:
+ (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
+ NSParameterAssert(nativeDtmfSender);
+ if (self = [super init]) {
+ _nativeDtmfSender = nativeDtmfSender;
+ RTCLogInfo(
+ @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description);
+ }
+ return self;
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
new file mode 100644
index 0000000000..a078b0aded
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCEncodedImage.h"
+
+#include "api/video/encoded_image.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
+
+ /** Populates a new instance from a native webrtc::EncodedImage, retaining its buffer. */
+ - (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage;
+/** Builds a native webrtc::EncodedImage that shares (does not copy) this object's buffer. */
+- (webrtc::EncodedImage)nativeEncodedImage;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
new file mode 100644
index 0000000000..7f8ae739e0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEncodedImage+Private.h"
+
+#import <objc/runtime.h>
+
+#include "rtc_base/numerics/safe_conversions.h"
+
+namespace {
+// An implementation of EncodedImageBufferInterface that doesn't perform any copies.
+// The NSData is retained by the `data_` ivar, keeping the underlying bytes
+// alive for the lifetime of this ref-counted buffer.
+class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
+ public:
+ static rtc::scoped_refptr<ObjCEncodedImageBuffer> Create(NSData *data) {
+ return rtc::make_ref_counted<ObjCEncodedImageBuffer>(data);
+ }
+ const uint8_t *data() const override { return static_cast<const uint8_t *>(data_.bytes); }
+ // TODO(bugs.webrtc.org/9378): delete this non-const data method.
+ uint8_t *data() override {
+ return const_cast<uint8_t *>(static_cast<const uint8_t *>(data_.bytes));
+ }
+ size_t size() const override { return data_.length; }
+
+ protected:
+ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {}
+ ~ObjCEncodedImageBuffer() {}
+
+ NSData *data_;
+};
+}
+
+// A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated
+// objects. (objc_setAssociatedObject can only store Objective-C objects, not
+// C++ smart pointers, hence this box.)
+@interface RTCWrappedEncodedImageBuffer : NSObject
+@property(nonatomic) rtc::scoped_refptr<webrtc::EncodedImageBufferInterface> buffer;
+- (instancetype)initWithEncodedImageBuffer:
+ (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer;
+@end
+@implementation RTCWrappedEncodedImageBuffer
+@synthesize buffer = _buffer;
+- (instancetype)initWithEncodedImageBuffer:
+ (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
+ self = [super init];
+ if (self) {
+ _buffer = buffer;
+ }
+ return self;
+}
+@end
+
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
+
+// Categories cannot declare ivars, so the native buffer reference is stored
+// as an associated object keyed by @selector(encodedData).
+ - (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
+ RTCWrappedEncodedImageBuffer *wrappedBuffer =
+ objc_getAssociatedObject(self, @selector(encodedData));
+ return wrappedBuffer.buffer;
+}
+
+- (void)setEncodedData:(rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
+ return objc_setAssociatedObject(
+ self,
+ @selector(encodedData),
+ [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer],
+ OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+// Copies the native image's metadata field-by-field; the payload itself is
+// shared, not copied.
+- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage {
+ if (self = [super init]) {
+ // A reference to the encodedData must be stored so that it's kept alive as long
+ // self.buffer references its underlying data.
+ self.encodedData = encodedImage.GetEncodedData();
+ // Wrap the buffer in NSData without copying, do not take ownership.
+ self.buffer = [NSData dataWithBytesNoCopy:self.encodedData->data()
+ length:encodedImage.size()
+ freeWhenDone:NO];
+ self.encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
+ self.encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
+ self.timeStamp = encodedImage.Timestamp();
+ self.captureTimeMs = encodedImage.capture_time_ms_;
+ self.ntpTimeMs = encodedImage.ntp_time_ms_;
+ self.flags = encodedImage.timing_.flags;
+ self.encodeStartMs = encodedImage.timing_.encode_start_ms;
+ self.encodeFinishMs = encodedImage.timing_.encode_finish_ms;
+ self.frameType = static_cast<RTCFrameType>(encodedImage._frameType);
+ self.rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
+ self.qp = @(encodedImage.qp_);
+ self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
+ RTCVideoContentTypeScreenshare :
+ RTCVideoContentTypeUnspecified;
+ }
+
+ return self;
+}
+
+- (webrtc::EncodedImage)nativeEncodedImage {
+ // Return the pointer without copying.
+ webrtc::EncodedImage encodedImage;
+ // Prefer the retained native buffer; otherwise wrap self.buffer in a
+ // no-copy adapter. qp falls back to -1 when the NSNumber is nil.
+ if (self.encodedData) {
+ encodedImage.SetEncodedData(self.encodedData);
+ } else if (self.buffer) {
+ encodedImage.SetEncodedData(ObjCEncodedImageBuffer::Create(self.buffer));
+ }
+ encodedImage.set_size(self.buffer.length);
+ encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(self.encodedWidth);
+ encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(self.encodedHeight);
+ encodedImage.SetTimestamp(self.timeStamp);
+ encodedImage.capture_time_ms_ = self.captureTimeMs;
+ encodedImage.ntp_time_ms_ = self.ntpTimeMs;
+ encodedImage.timing_.flags = self.flags;
+ encodedImage.timing_.encode_start_ms = self.encodeStartMs;
+ encodedImage.timing_.encode_finish_ms = self.encodeFinishMs;
+ encodedImage._frameType = webrtc::VideoFrameType(self.frameType);
+ encodedImage.rotation_ = webrtc::VideoRotation(self.rotation);
+ encodedImage.qp_ = self.qp ? self.qp.intValue : -1;
+ encodedImage.content_type_ = (self.contentType == RTCVideoContentTypeScreenshare) ?
+ webrtc::VideoContentType::SCREENSHARE :
+ webrtc::VideoContentType::UNSPECIFIED;
+
+ return encodedImage;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h
new file mode 100644
index 0000000000..1f290d8a66
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/** The only valid value for the following if set is kRTCFieldTrialEnabledValue. */
+RTC_EXTERN NSString * const kRTCFieldTrialAudioForceNoTWCCKey;
+RTC_EXTERN NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey;
+RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey;
+RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey;
+RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key;
+RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey;
+RTC_EXTERN NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey;
+RTC_EXTERN NSString *const kRTCFieldTrialUseNWPathMonitor;
+
+/** The valid value for field trials above. */
+RTC_EXTERN NSString * const kRTCFieldTrialEnabledValue;
+
+/** Initialize field trials using a dictionary mapping field trial keys to their
+ * values. See above for valid keys and values. Must be called before any other
+ * call into WebRTC. See: webrtc/system_wrappers/include/field_trial.h
+ */
+RTC_EXTERN void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials);
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm
new file mode 100644
index 0000000000..852aeeec84
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFieldTrials.mm
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFieldTrials.h"
+
+#include <memory>
+
+#import "base/RTCLogging.h"
+
+#include "system_wrappers/include/field_trial.h"
+
+NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC";
+NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC";
+NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead";
+NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised";
+NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03";
+NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile";
+NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey =
+ @"WebRTC-Audio-MinimizeResamplingOnMobile";
+NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor";
+NSString * const kRTCFieldTrialEnabledValue = @"Enabled";
+
+// InitFieldTrialsFromString stores the char*, so the char array must outlive
+// the application.
+static char *gFieldTrialInitString = nullptr;
+
+// Flattens the dictionary into WebRTC's "key/value/key/value/..." field-trial
+// string and registers it with the native field-trial machinery.
+void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials) {
+ if (!fieldTrials) {
+ RTCLogWarning(@"No fieldTrials provided.");
+ return;
+ }
+ // Assemble the keys and values into the field trial string.
+ // We don't perform any extra format checking. That should be done by the underlying WebRTC calls.
+ NSMutableString *fieldTrialInitString = [NSMutableString string];
+ for (NSString *key in fieldTrials) {
+ NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]];
+ [fieldTrialInitString appendString:fieldTrialEntry];
+ }
+ size_t len = fieldTrialInitString.length + 1; // +1 for the NUL terminator.
+ // NOTE(review): if this function was called before, the field-trial library
+ // may still hold the pointer deleted here until InitFieldTrialsFromString
+ // below replaces it — and the early return on conversion failure would
+ // leave that dangling registration in place. Confirm this is only invoked
+ // once, before any other WebRTC call, as the header requires.
+ if (gFieldTrialInitString != nullptr) {
+ delete[] gFieldTrialInitString;
+ }
+ gFieldTrialInitString = new char[len];
+ if (![fieldTrialInitString getCString:gFieldTrialInitString
+ maxLength:len
+ encoding:NSUTF8StringEncoding]) {
+ RTCLogError(@"Failed to convert field trial string.");
+ return;
+ }
+ webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString);
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h
new file mode 100644
index 0000000000..cb397c9633
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+// Lowest log severity that the file logger will record.
+typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) {
+ RTCFileLoggerSeverityVerbose,
+ RTCFileLoggerSeverityInfo,
+ RTCFileLoggerSeverityWarning,
+ RTCFileLoggerSeverityError
+};
+
+// Strategy used to rotate log files once the maximum size is reached.
+typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) {
+ RTCFileLoggerTypeCall,
+ RTCFileLoggerTypeApp,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+// This class intercepts WebRTC logs and saves them to a file. The file size
+// will not exceed the given maximum bytesize. When the maximum bytesize is
+// reached, logs are rotated according to the rotationType specified.
+// For RTCFileLoggerTypeCall, logs from the beginning and the end
+// are preserved while the middle section is overwritten instead.
+// For RTCFileLoggerTypeApp, the oldest log is overwritten.
+// This class is not threadsafe.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject
+
+// The severity level to capture. The default is RTCFileLoggerSeverityInfo.
+@property(nonatomic, assign) RTCFileLoggerSeverity severity;
+
+// The rotation type for this file logger. The default is
+// RTCFileLoggerTypeCall.
+@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType;
+
+// Disables buffering disk writes. Should be set before `start`. Buffering
+// is enabled by default for performance.
+@property(nonatomic, assign) BOOL shouldDisableBuffering;
+
+// Default constructor provides default settings for dir path, file size and
+// rotation type.
+- (instancetype)init;
+
+// Create file logger with default rotation type.
+- (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize;
+
+// Designated initializer. Returns nil if dirPath exists but is not a
+// directory, or if the directory cannot be created.
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType NS_DESIGNATED_INITIALIZER;
+
+// Starts writing WebRTC logs to disk if not already started. Overwrites any
+// existing file(s).
+- (void)start;
+
+// Stops writing WebRTC logs to disk. This method is also called on dealloc.
+- (void)stop;
+
+// Returns the current contents of the logs, or nil if start has been called
+// without a stop.
+- (nullable NSData *)logData;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm
new file mode 100644
index 0000000000..9562245611
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCFileLogger.mm
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileLogger.h"
+
+#include <memory>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/file_rotating_stream.h"
+#include "rtc_base/log_sinks.h"
+#include "rtc_base/logging.h"
+
+// Defaults used by -init: <Documents>/webrtc_logs, 10 MB cap.
+NSString *const kDefaultLogDirName = @"webrtc_logs";
+NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
+const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
+
+@implementation RTC_OBJC_TYPE (RTCFileLogger) {
+ BOOL _hasStarted;
+ NSString *_dirPath;
+ NSUInteger _maxFileSize;
+ std::unique_ptr<rtc::FileRotatingLogSink> _logSink;
+}
+
+@synthesize severity = _severity;
+@synthesize rotationType = _rotationType;
+@synthesize shouldDisableBuffering = _shouldDisableBuffering;
+
+// Convenience: log into <Documents>/webrtc_logs with the default size cap.
+- (instancetype)init {
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(
+ NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = [paths firstObject];
+ NSString *defaultDirPath =
+ [documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
+ return [self initWithDirPath:defaultDirPath
+ maxFileSize:kDefaultMaxFileSize];
+}
+
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize {
+ return [self initWithDirPath:dirPath
+ maxFileSize:maxFileSize
+ rotationType:RTCFileLoggerTypeCall];
+}
+
+// Designated initializer; ensures dirPath exists as a directory, creating it
+// if necessary, and returns nil on any failure.
+- (instancetype)initWithDirPath:(NSString *)dirPath
+ maxFileSize:(NSUInteger)maxFileSize
+ rotationType:(RTCFileLoggerRotationType)rotationType {
+ NSParameterAssert(dirPath.length);
+ NSParameterAssert(maxFileSize);
+ if (self = [super init]) {
+ BOOL isDir = NO;
+ NSFileManager *fileManager = [NSFileManager defaultManager];
+ if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
+ if (!isDir) {
+ // Bail if something already exists there.
+ return nil;
+ }
+ } else {
+ if (![fileManager createDirectoryAtPath:dirPath
+ withIntermediateDirectories:NO
+ attributes:nil
+ error:nil]) {
+ // Bail if we failed to create a directory.
+ return nil;
+ }
+ }
+ _dirPath = dirPath;
+ _maxFileSize = maxFileSize;
+ _severity = RTCFileLoggerSeverityInfo;
+ }
+ return self;
+}
+
+// Ensure the sink is deregistered from rtc::LogMessage before destruction.
+- (void)dealloc {
+ [self stop];
+}
+
+// Builds the rotating sink matching `rotationType` and registers it with the
+// global rtc::LogMessage stream. No-op if already started.
+- (void)start {
+ if (_hasStarted) {
+ return;
+ }
+ switch (_rotationType) {
+ case RTCFileLoggerTypeApp:
+ _logSink.reset(
+ new rtc::FileRotatingLogSink(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix,
+ _maxFileSize,
+ _maxFileSize / 10));
+ break;
+ case RTCFileLoggerTypeCall:
+ _logSink.reset(
+ new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
+ _maxFileSize));
+ break;
+ }
+ if (!_logSink->Init()) {
+ RTC_LOG(LS_ERROR) << "Failed to open log files at path: " << _dirPath.UTF8String;
+ _logSink.reset();
+ return;
+ }
+ if (_shouldDisableBuffering) {
+ _logSink->DisableBuffering();
+ }
+ rtc::LogMessage::LogThreads(true);
+ rtc::LogMessage::LogTimestamps(true);
+ rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
+ _hasStarted = YES;
+}
+
+- (void)stop {
+ if (!_hasStarted) {
+ return;
+ }
+ RTC_DCHECK(_logSink);
+ rtc::LogMessage::RemoveLogToStream(_logSink.get());
+ _hasStarted = NO;
+ _logSink.reset();
+}
+
+// Reads all rotated log files back into memory. Only valid while stopped;
+// returns nil if logging is active, and an empty NSData if there is nothing
+// on disk.
+- (nullable NSData *)logData {
+ if (_hasStarted) {
+ return nil;
+ }
+ NSMutableData* logData = [NSMutableData data];
+ std::unique_ptr<rtc::FileRotatingStreamReader> stream;
+ switch(_rotationType) {
+ case RTCFileLoggerTypeApp:
+ stream = std::make_unique<rtc::FileRotatingStreamReader>(_dirPath.UTF8String,
+ kRTCFileLoggerRotatingLogPrefix);
+ break;
+ case RTCFileLoggerTypeCall:
+ stream = std::make_unique<rtc::CallSessionFileRotatingStreamReader>(_dirPath.UTF8String);
+ break;
+ }
+ size_t bufferSize = stream->GetSize();
+ if (bufferSize == 0) {
+ return logData;
+ }
+ // Allocate memory using malloc so we can pass it direcly to NSData without
+ // copying; NSData will free() it. NOTE(review): the unique_ptr's default
+ // delete[] would mismatch malloc, but it never runs because the pointer is
+ // always release()d below — still worth cleaning up with a custom deleter.
+ std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
+ size_t read = stream->ReadAll(buffer.get(), bufferSize);
+ logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
+ length:read];
+ return logData;
+}
+
+#pragma mark - Private
+
+// Maps the Objective-C severity enum onto rtc::LoggingSeverity.
+- (rtc::LoggingSeverity)rtcSeverity {
+ switch (_severity) {
+ case RTCFileLoggerSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCFileLoggerSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCFileLoggerSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCFileLoggerSeverityError:
+ return rtc::LS_ERROR;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
new file mode 100644
index 0000000000..409e16b608
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate.h"
+
+#include <memory>
+
+#include "api/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+()
+
+ /**
+ * The native IceCandidateInterface representation of this RTCIceCandidate
+ * object. This is needed to pass to the underlying C++ APIs.
+ *
+ * Note: the getter re-parses the stored SDP on each access, and the
+ * returned unique_ptr may hold null when parsing fails (the failure is
+ * logged by the implementation).
+ */
+ @property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+
+/**
+ * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
+ * ownership is taken of the native candidate.
+ */
+- (instancetype)initWithNativeCandidate:(const webrtc::IceCandidateInterface *)candidate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h
new file mode 100644
index 0000000000..f84843af6c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject
+
+/**
+ * If present, the identifier of the "media stream identification" for the media
+ * component this candidate is associated with.
+ */
+@property(nonatomic, readonly, nullable) NSString *sdpMid;
+
+/**
+ * The index (starting at zero) of the media description this candidate is
+ * associated with in the SDP.
+ */
+@property(nonatomic, readonly) int sdpMLineIndex;
+
+/** The SDP string for this candidate. */
+@property(nonatomic, readonly) NSString *sdp;
+
+/**
+ * The URL of the ICE server which this candidate is gathered from.
+ * Populated only when the object was created from a native candidate; it is
+ * not set by the SDP-based initializer below.
+ */
+@property(nonatomic, readonly, nullable) NSString *serverUrl;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Initialize an RTCIceCandidate from SDP.
+ */
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(int)sdpMLineIndex
+ sdpMid:(nullable NSString *)sdpMid NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm
new file mode 100644
index 0000000000..48385ef5b4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidate.mm
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidate+Private.h"
+
+#include <memory>
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidate)
+
+@synthesize sdpMid = _sdpMid;
+@synthesize sdpMLineIndex = _sdpMLineIndex;
+@synthesize sdp = _sdp;
+@synthesize serverUrl = _serverUrl;
+
+// Designated initializer; copies the incoming strings, requires non-empty sdp.
+- (instancetype)initWithSdp:(NSString *)sdp
+ sdpMLineIndex:(int)sdpMLineIndex
+ sdpMid:(NSString *)sdpMid {
+ NSParameterAssert(sdp.length);
+ if (self = [super init]) {
+ _sdpMid = [sdpMid copy];
+ _sdpMLineIndex = sdpMLineIndex;
+ _sdp = [sdp copy];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@",
+ _sdpMid,
+ _sdpMLineIndex,
+ _sdp,
+ _serverUrl];
+}
+
+#pragma mark - Private
+
+// Serializes the native candidate to SDP and delegates to the designated
+// initializer; serverUrl is then filled in via direct ivar access since the
+// property is readonly.
+- (instancetype)initWithNativeCandidate:
+ (const webrtc::IceCandidateInterface *)candidate {
+ NSParameterAssert(candidate);
+ std::string sdp;
+ candidate->ToString(&sdp);
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate =
+ [self initWithSdp:[NSString stringForStdString:sdp]
+ sdpMLineIndex:candidate->sdp_mline_index()
+ sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
+ rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
+ return rtcCandidate;
+}
+
+// Parses the stored SDP back into a native candidate on every call.
+- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
+ webrtc::SdpParseError error;
+
+ webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
+ _sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
+
+ if (!candidate) {
+ RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
+ error.description.c_str(),
+ error.line.c_str());
+ }
+
+ // May wrap nullptr when parsing failed; the error was logged above.
+ return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h
new file mode 100644
index 0000000000..8502da08a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidateErrorEvent.h"
+
+#include <string>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceCandidateErrorEvent)
+()
+
+ - (instancetype)initWithAddress : (const std::string&)address port : (const int)port url
+ : (const std::string&)url errorCode : (const int)errorCode errorText
+ : (const std::string&)errorText;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h
new file mode 100644
index 0000000000..e0906fdbdd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceCandidateErrorEvent) : NSObject
+
+/** The local IP address used to communicate with the STUN or TURN server. */
+@property(nonatomic, readonly) NSString *address;
+
+/** The port used to communicate with the STUN or TURN server. */
+@property(nonatomic, readonly) int port;
+
+/** The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred. */
+@property(nonatomic, readonly) NSString *url;
+
+/** The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
+ * the server, errorCode will be set to the value 701 which is outside the STUN error code range.
+ * This error is only fired once per server URL while in the RTCIceGatheringState of "gathering". */
+@property(nonatomic, readonly) int errorCode;
+
+/** The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
+ * errorText will be set to an implementation-specific value providing details about the error. */
+@property(nonatomic, readonly) NSString *errorText;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm
new file mode 100644
index 0000000000..573e30642b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceCandidateErrorEvent.mm
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceCandidateErrorEvent+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceCandidateErrorEvent)
+
+@synthesize address = _address;
+@synthesize port = _port;
+@synthesize url = _url;
+@synthesize errorCode = _errorCode;
+@synthesize errorText = _errorText;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (instancetype)initWithAddress:(const std::string&)address
+ port:(const int)port
+ url:(const std::string&)url
+ errorCode:(const int)errorCode
+ errorText:(const std::string&)errorText {
+ if (self = [self init]) {
+ _address = [NSString stringForStdString:address];
+ _port = port;
+ _url = [NSString stringForStdString:url];
+ _errorCode = errorCode;
+ _errorText = [NSString stringForStdString:errorText];
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h
new file mode 100644
index 0000000000..3eee819965
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCIceServer)
+()
+
+ /**
+ * IceServer struct representation of this RTCIceServer object's data.
+ * This is needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
+
+/** Initialize an RTCIceServer from a native IceServer. */
+- (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h
new file mode 100644
index 0000000000..7ddcbc1a1f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) {
+ RTCTlsCertPolicySecure,
+ RTCTlsCertPolicyInsecureNoCheck
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject
+
+/** URI(s) for this server represented as NSStrings. */
+@property(nonatomic, readonly) NSArray<NSString *> *urlStrings;
+
+/** Username to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, readonly, nullable) NSString *username;
+
+/** Credential to use if this RTCIceServer object is a TURN server. */
+@property(nonatomic, readonly, nullable) NSString *credential;
+
+/**
+ * TLS certificate policy to use if this RTCIceServer object is a TURN server.
+ */
+@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy;
+
+/**
+ If the URIs in `urls` only contain IP addresses, this field can be used
+ to indicate the hostname, which may be necessary for TLS (using the SNI
+ extension). If `urls` itself contains the hostname, this isn't necessary.
+ */
+@property(nonatomic, readonly, nullable) NSString *hostname;
+
+/** List of protocols to be used in the TLS ALPN extension. */
+@property(nonatomic, readonly) NSArray<NSString *> *tlsAlpnProtocols;
+
+/**
+ List of elliptic curves to be used in the TLS elliptic curves extension.
+ Only curve names supported by OpenSSL should be used (e.g. "P-256", "X25519").
+ */
+@property(nonatomic, readonly) NSArray<NSString *> *tlsEllipticCurves;
+
+- (nonnull instancetype)init NS_UNAVAILABLE;
+
+/** Convenience initializer for a server with no authentication (e.g. STUN). */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, an optional username,
+ * and an optional credential.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, and TLS cert policy.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy and hostname.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy, hostname and ALPN protocols.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols;
+
+/**
+ * Initialize an RTCIceServer with its associated URLs, optional username,
+ * optional credential, TLS cert policy, hostname, ALPN protocols and
+ * elliptic curves.
+ */
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(nullable NSString *)username
+ credential:(nullable NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(nullable NSString *)hostname
+ tlsAlpnProtocols:(nullable NSArray<NSString *> *)tlsAlpnProtocols
+ tlsEllipticCurves:(nullable NSArray<NSString *> *)tlsEllipticCurves
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm
new file mode 100644
index 0000000000..19a0a7e9e8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCIceServer.mm
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCIceServer+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCIceServer)
+
+@synthesize urlStrings = _urlStrings;
+@synthesize username = _username;
+@synthesize credential = _credential;
+@synthesize tlsCertPolicy = _tlsCertPolicy;
+@synthesize hostname = _hostname;
+@synthesize tlsAlpnProtocols = _tlsAlpnProtocols;
+@synthesize tlsEllipticCurves = _tlsEllipticCurves;
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
+ return [self initWithURLStrings:urlStrings
+ username:nil
+ credential:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:RTCTlsCertPolicySecure];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:nil];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:[NSArray array]];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
+ return [self initWithURLStrings:urlStrings
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:tlsAlpnProtocols
+ tlsEllipticCurves:[NSArray array]];
+}
+
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+ username:(NSString *)username
+ credential:(NSString *)credential
+ tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+ hostname:(NSString *)hostname
+ tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
+ tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
+ NSParameterAssert(urlStrings.count);
+ if (self = [super init]) {
+ _urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
+ _username = [username copy];
+ _credential = [credential copy];
+ _tlsCertPolicy = tlsCertPolicy;
+ _hostname = [hostname copy];
+ _tlsAlpnProtocols = [[NSArray alloc] initWithArray:tlsAlpnProtocols copyItems:YES];
+ _tlsEllipticCurves = [[NSArray alloc] initWithArray:tlsEllipticCurves copyItems:YES];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
+ _urlStrings,
+ _username,
+ _credential,
+ [self stringForTlsCertPolicy:_tlsCertPolicy],
+ _hostname,
+ _tlsAlpnProtocols,
+ _tlsEllipticCurves];
+}
+
+#pragma mark - Private
+
+- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+ switch (tlsCertPolicy) {
+ case RTCTlsCertPolicySecure:
+ return @"RTCTlsCertPolicySecure";
+ case RTCTlsCertPolicyInsecureNoCheck:
+ return @"RTCTlsCertPolicyInsecureNoCheck";
+ }
+}
+
+- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+ __block webrtc::PeerConnectionInterface::IceServer iceServer;
+
+ iceServer.username = [NSString stdStringForString:_username];
+ iceServer.password = [NSString stdStringForString:_credential];
+ iceServer.hostname = [NSString stdStringForString:_hostname];
+
+ [_tlsAlpnProtocols enumerateObjectsUsingBlock:^(NSString *proto, NSUInteger idx, BOOL *stop) {
+ iceServer.tls_alpn_protocols.push_back(proto.stdString);
+ }];
+
+ [_tlsEllipticCurves enumerateObjectsUsingBlock:^(NSString *curve, NSUInteger idx, BOOL *stop) {
+ iceServer.tls_elliptic_curves.push_back(curve.stdString);
+ }];
+
+ [_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
+ NSUInteger idx,
+ BOOL *stop) {
+ iceServer.urls.push_back(url.stdString);
+ }];
+
+ switch (_tlsCertPolicy) {
+ case RTCTlsCertPolicySecure:
+ iceServer.tls_cert_policy =
+ webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
+ break;
+ case RTCTlsCertPolicyInsecureNoCheck:
+ iceServer.tls_cert_policy =
+ webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
+ break;
+ }
+ return iceServer;
+}
+
+- (instancetype)initWithNativeServer:
+ (webrtc::PeerConnectionInterface::IceServer)nativeServer {
+ NSMutableArray *urls =
+ [NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
+ for (auto const &url : nativeServer.urls) {
+ [urls addObject:[NSString stringForStdString:url]];
+ }
+ NSString *username = [NSString stringForStdString:nativeServer.username];
+ NSString *credential = [NSString stringForStdString:nativeServer.password];
+ NSString *hostname = [NSString stringForStdString:nativeServer.hostname];
+ NSMutableArray *tlsAlpnProtocols =
+ [NSMutableArray arrayWithCapacity:nativeServer.tls_alpn_protocols.size()];
+ for (auto const &proto : nativeServer.tls_alpn_protocols) {
+ [tlsAlpnProtocols addObject:[NSString stringForStdString:proto]];
+ }
+ NSMutableArray *tlsEllipticCurves =
+ [NSMutableArray arrayWithCapacity:nativeServer.tls_elliptic_curves.size()];
+ for (auto const &curve : nativeServer.tls_elliptic_curves) {
+ [tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
+ }
+ RTCTlsCertPolicy tlsCertPolicy;
+
+ switch (nativeServer.tls_cert_policy) {
+ case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
+ tlsCertPolicy = RTCTlsCertPolicySecure;
+ break;
+ case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
+ tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
+ break;
+ }
+
+ self = [self initWithURLStrings:urls
+ username:username
+ credential:credential
+ tlsCertPolicy:tlsCertPolicy
+ hostname:hostname
+ tlsAlpnProtocols:tlsAlpnProtocols
+ tlsEllipticCurves:tlsEllipticCurves];
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
new file mode 100644
index 0000000000..faa7962821
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLegacyStatsReport.h"
+
+#include "api/stats_types.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport)
+()
+
+ /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
+ - (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
new file mode 100644
index 0000000000..b3bd12c5d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This does not currently conform to the spec. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject
+
+/** Time since 1970-01-01T00:00:00Z in milliseconds. */
+@property(nonatomic, readonly) CFTimeInterval timestamp;
+
+/** The type of stats held by this object. */
+@property(nonatomic, readonly) NSString *type;
+
+/** The identifier for this object. */
+@property(nonatomic, readonly) NSString *reportId;
+
+/** A dictionary holding the actual stats. */
+@property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *values;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
new file mode 100644
index 0000000000..bd7a1ad9c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLegacyStatsReport+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport)
+
+@synthesize timestamp = _timestamp;
+@synthesize type = _type;
+@synthesize reportId = _reportId;
+@synthesize values = _values;
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@",
+ _reportId,
+ _type,
+ _timestamp,
+ _values];
+}
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
+ if (self = [super init]) {
+ _timestamp = nativeReport.timestamp();
+ _type = [NSString stringForStdString:nativeReport.TypeToString()];
+ _reportId = [NSString stringForStdString:
+ nativeReport.id()->ToString()];
+
+ NSUInteger capacity = nativeReport.values().size();
+ NSMutableDictionary *values =
+ [NSMutableDictionary dictionaryWithCapacity:capacity];
+ for (auto const &valuePair : nativeReport.values()) {
+ NSString *key = [NSString stringForStdString:
+ valuePair.second->display_name()];
+ NSString *value = [NSString stringForStdString:
+ valuePair.second->ToString()];
+
+ // Not expecting duplicate keys.
+ RTC_DCHECK(![values objectForKey:key]);
+ [values setObject:value forKey:key];
+ }
+ _values = values;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
new file mode 100644
index 0000000000..97eee8307d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints.h"
+
+#include <memory>
+
+#include "sdk/media_constraints.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMediaConstraints)
+()
+
+ /**
+ * A MediaConstraints representation of this RTCMediaConstraints object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ - (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
+
+/** Return a native Constraints object representing these constraints */
++ (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
+ (NSDictionary<NSString*, NSString*>*)constraints;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h
new file mode 100644
index 0000000000..c5baf20c1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Constraint keys for media sources. */
+/** The value for this key should be a base64 encoded string containing
+ * the data from the serialized configuration proto.
+ */
+RTC_EXTERN NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig;
+
+/** Constraint keys for generating offers and answers. */
+RTC_EXTERN NSString *const kRTCMediaConstraintsIceRestart;
+RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveAudio;
+RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveVideo;
+RTC_EXTERN NSString *const kRTCMediaConstraintsVoiceActivityDetection;
+
+/** Constraint values for Boolean parameters. */
+RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue;
+RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse;
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize with mandatory and/or optional constraints. */
+- (instancetype)
+ initWithMandatoryConstraints:(nullable NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:(nullable NSDictionary<NSString *, NSString *> *)optional
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
new file mode 100644
index 0000000000..0f46e4b8fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaConstraints+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
+ @(webrtc::MediaConstraints::kAudioNetworkAdaptorConfig);
+
+NSString *const kRTCMediaConstraintsIceRestart = @(webrtc::MediaConstraints::kIceRestart);
+NSString *const kRTCMediaConstraintsOfferToReceiveAudio =
+ @(webrtc::MediaConstraints::kOfferToReceiveAudio);
+NSString *const kRTCMediaConstraintsOfferToReceiveVideo =
+ @(webrtc::MediaConstraints::kOfferToReceiveVideo);
+NSString *const kRTCMediaConstraintsVoiceActivityDetection =
+ @(webrtc::MediaConstraints::kVoiceActivityDetection);
+
+NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue);
+NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse);
+
+@implementation RTC_OBJC_TYPE (RTCMediaConstraints) {
+ NSDictionary<NSString *, NSString *> *_mandatory;
+ NSDictionary<NSString *, NSString *> *_optional;
+}
+
+- (instancetype)initWithMandatoryConstraints:
+ (NSDictionary<NSString *, NSString *> *)mandatory
+ optionalConstraints:
+ (NSDictionary<NSString *, NSString *> *)optional {
+ if (self = [super init]) {
+ _mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
+ copyItems:YES];
+ _optional = [[NSDictionary alloc] initWithDictionary:optional
+ copyItems:YES];
+ }
+ return self;
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional];
+}
+
+#pragma mark - Private
+
+- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
+ webrtc::MediaConstraints::Constraints mandatory =
+ [[self class] nativeConstraintsForConstraints:_mandatory];
+ webrtc::MediaConstraints::Constraints optional =
+ [[self class] nativeConstraintsForConstraints:_optional];
+
+ webrtc::MediaConstraints *nativeConstraints =
+ new webrtc::MediaConstraints(mandatory, optional);
+ return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
+}
+
++ (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
+ (NSDictionary<NSString *, NSString *> *)constraints {
+ webrtc::MediaConstraints::Constraints nativeConstraints;
+ for (NSString *key in constraints) {
+ NSAssert([key isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", key);
+ NSString *value = [constraints objectForKey:key];
+ NSAssert([value isKindOfClass:[NSString class]],
+ @"%@ is not an NSString.", value);
+ if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
+ // This value is base64 encoded.
+ NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
+ std::string configValue =
+ std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
+ nativeConstraints.push_back(webrtc::MediaConstraints::Constraint(key.stdString, configValue));
+ } else {
+ nativeConstraints.push_back(
+ webrtc::MediaConstraints::Constraint(key.stdString, value.stdString));
+ }
+ }
+ return nativeConstraints;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
new file mode 100644
index 0000000000..edda892e50
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
+ RTCMediaSourceTypeAudio,
+ RTCMediaSourceTypeVideo,
+};
+
+@interface RTC_OBJC_TYPE (RTCMediaSource)
+()
+
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTCSourceState)state;
+
++ (RTCSourceState)sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState;
+
++ (NSString *)stringForState:(RTCSourceState)state;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h
new file mode 100644
index 0000000000..ba19c2a352
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSInteger, RTCSourceState) {
+ RTCSourceStateInitializing,
+ RTCSourceStateLive,
+ RTCSourceStateEnded,
+ RTCSourceStateMuted,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject
+
+/** The current state of the RTCMediaSource. */
+@property(nonatomic, readonly) RTCSourceState state;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm
new file mode 100644
index 0000000000..61472a782a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaSource.mm
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaSource+Private.h"
+
+#include "rtc_base/checks.h"
+
+@implementation RTC_OBJC_TYPE (RTCMediaSource) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ RTCMediaSourceType _type;
+}
+
+@synthesize nativeMediaSource = _nativeMediaSource;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeMediaSource);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeMediaSource = nativeMediaSource;
+ _type = type;
+ }
+ return self;
+}
+
+- (RTCSourceState)state {
+ return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
+}
+
+#pragma mark - Private
+
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
+ (RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return webrtc::MediaSourceInterface::kInitializing;
+ case RTCSourceStateLive:
+ return webrtc::MediaSourceInterface::kLive;
+ case RTCSourceStateEnded:
+ return webrtc::MediaSourceInterface::kEnded;
+ case RTCSourceStateMuted:
+ return webrtc::MediaSourceInterface::kMuted;
+ }
+}
+
++ (RTCSourceState)sourceStateForNativeState:
+ (webrtc::MediaSourceInterface::SourceState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaSourceInterface::kInitializing:
+ return RTCSourceStateInitializing;
+ case webrtc::MediaSourceInterface::kLive:
+ return RTCSourceStateLive;
+ case webrtc::MediaSourceInterface::kEnded:
+ return RTCSourceStateEnded;
+ case webrtc::MediaSourceInterface::kMuted:
+ return RTCSourceStateMuted;
+ }
+}
+
++ (NSString *)stringForState:(RTCSourceState)state {
+ switch (state) {
+ case RTCSourceStateInitializing:
+ return @"Initializing";
+ case RTCSourceStateLive:
+ return @"Live";
+ case RTCSourceStateEnded:
+ return @"Ended";
+ case RTCSourceStateMuted:
+ return @"Muted";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
new file mode 100644
index 0000000000..6c8a602766
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStream.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMediaStream)
+()
+
+ /**
+ * MediaStreamInterface representation of this RTCMediaStream object. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
+
+/** Initialize an RTCMediaStream with an id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ streamId:(NSString *)streamId;
+
+/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h
new file mode 100644
index 0000000000..2d56f15c7d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject
+
+/** The audio tracks in this stream. */
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *audioTracks;
+
+/** The video tracks in this stream. */
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *videoTracks;
+
+/** An identifier for this media stream. */
+@property(nonatomic, readonly) NSString *streamId;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Adds the given audio track to this media stream. */
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
+
+/** Adds the given video track to this media stream. */
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
+
+/** Removes the given audio track to this media stream. */
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
+
+/** Removes the given video track to this media stream. */
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm
new file mode 100644
index 0000000000..beb4a7a91b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStream.mm
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStream+Private.h"
+
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoTrack+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCMediaStream) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::Thread *_signalingThread;
+ NSMutableArray *_audioTracks /* accessed on _signalingThread */;
+ NSMutableArray *_videoTracks /* accessed on _signalingThread */;
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ streamId:(NSString *)streamId {
+ NSParameterAssert(factory);
+ NSParameterAssert(streamId.length);
+ std::string nativeId = [NSString stdStringForString:streamId];
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ factory.nativeFactory->CreateLocalMediaStream(nativeId);
+ return [self initWithFactory:factory nativeMediaStream:stream];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *)audioTracks {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *>(
+ RTC_FROM_HERE, [self]() { return self.audioTracks; });
+ }
+ return [_audioTracks copy];
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *)videoTracks {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *>(
+ RTC_FROM_HERE, [self]() { return self.videoTracks; });
+ }
+ return [_videoTracks copy];
+}
+
+- (NSString *)streamId {
+ return [NSString stringForStdString:_nativeMediaStream->id()];
+}
+
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [audioTrack, self]() { return [self addAudioTrack:audioTrack]; });
+ }
+ if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
+ [_audioTracks addObject:audioTrack];
+ }
+}
+
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [videoTrack, self]() { return [self addVideoTrack:videoTrack]; });
+ }
+ if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
+ [_videoTracks addObject:videoTrack];
+ }
+}
+
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [audioTrack, self]() { return [self removeAudioTrack:audioTrack]; });
+ }
+ NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
+ if (index == NSNotFound) {
+ RTC_LOG(LS_INFO) << "|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)";
+ return;
+ }
+ if (_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
+ [_audioTracks removeObjectAtIndex:index];
+ }
+}
+
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
+ if (!_signalingThread->IsCurrent()) {
+ return _signalingThread->Invoke<void>(
+ RTC_FROM_HERE, [videoTrack, self]() { return [self removeVideoTrack:videoTrack]; });
+ }
+ NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
+ if (index == NSNotFound) {
+ RTC_LOG(LS_INFO) << "|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)";
+ return;
+ }
+
+ if (_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
+ [_videoTracks removeObjectAtIndex:index];
+ }
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu",
+ self.streamId,
+ (unsigned long)self.audioTracks.count,
+ (unsigned long)self.videoTracks.count];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+ return _nativeMediaStream;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaStream:
+ (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
+ NSParameterAssert(nativeMediaStream);
+ if (self = [super init]) {
+ _factory = factory;
+ _signalingThread = factory.signalingThread;
+
+ webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
+ webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
+
+ _audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
+ _videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
+ _nativeMediaStream = nativeMediaStream;
+
+ for (auto &track : audioTracks) {
+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
+ RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack =
+ [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory
+ nativeTrack:track
+ type:type];
+ [_audioTracks addObject:audioTrack];
+ }
+
+ for (auto &track : videoTracks) {
+ RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
+ RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack =
+ [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory
+ nativeTrack:track
+ type:type];
+ [_videoTracks addObject:videoTrack];
+ }
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
new file mode 100644
index 0000000000..ee51e27b2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#include "api/media_stream_interface.h"
+
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
+ RTCMediaStreamTrackTypeAudio,
+ RTCMediaStreamTrackTypeVideo,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack)
+()
+
+ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+ factory;
+
+/**
+ * The native MediaStreamTrackInterface passed in or created during
+ * construction.
+ */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
+
+/**
+ * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
+ */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
+
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state;
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
+
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+ mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
new file mode 100644
index 0000000000..2200122ccd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Represents the state of the track. This exposes the same states in C++.
+ */
+typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) {
+ RTCMediaStreamTrackStateLive,
+ RTCMediaStreamTrackStateEnded
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio;
+RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo;
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject
+
+/**
+ * The kind of track. For example, "audio" if this track represents an audio
+ * track and "video" if this track represents a video track.
+ */
+@property(nonatomic, readonly) NSString *kind;
+
+/** An identifier string. */
+@property(nonatomic, readonly) NSString *trackId;
+
+/** The enabled state of the track. */
+@property(nonatomic, assign) BOOL isEnabled;
+
+/** The state of the track. */
+@property(nonatomic, readonly) RTCMediaStreamTrackState readyState;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
new file mode 100644
index 0000000000..f1e128ca60
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCVideoTrack+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+NSString * const kRTCMediaStreamTrackKindAudio =
+ @(webrtc::MediaStreamTrackInterface::kAudioKind);
+NSString * const kRTCMediaStreamTrackKindVideo =
+ @(webrtc::MediaStreamTrackInterface::kVideoKind);
+
+@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
+ RTCMediaStreamTrackType _type;
+}
+
+- (NSString *)kind {
+ return [NSString stringForStdString:_nativeTrack->kind()];
+}
+
+- (NSString *)trackId {
+ return [NSString stringForStdString:_nativeTrack->id()];
+}
+
+- (BOOL)isEnabled {
+ return _nativeTrack->enabled();
+}
+
+- (void)setIsEnabled:(BOOL)isEnabled {
+ _nativeTrack->set_enabled(isEnabled);
+}
+
+- (RTCMediaStreamTrackState)readyState {
+ return [[self class] trackStateForNativeState:_nativeTrack->state()];
+}
+
+- (NSString *)description {
+ NSString *readyState = [[self class] stringForState:self.readyState];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@",
+ self.kind,
+ self.trackId,
+ self.isEnabled ? @"enabled" : @"disabled",
+ readyState];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object];
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeTrack.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ return _nativeTrack;
+}
+
+@synthesize factory = _factory;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(factory);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeTrack = nativeTrack;
+ _type = type;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ NSParameterAssert(nativeTrack);
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
+ return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeAudio];
+ }
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
+ return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeVideo];
+ }
+ return nil;
+}
+
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ if (!track) {
+ return NO;
+ }
+ return _nativeTrack == track.nativeTrack;
+}
+
++ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
+ (RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateLive:
+ return webrtc::MediaStreamTrackInterface::kLive;
+ case RTCMediaStreamTrackStateEnded:
+ return webrtc::MediaStreamTrackInterface::kEnded;
+ }
+}
+
++ (RTCMediaStreamTrackState)trackStateForNativeState:
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeState {
+ switch (nativeState) {
+ case webrtc::MediaStreamTrackInterface::kLive:
+ return RTCMediaStreamTrackStateLive;
+ case webrtc::MediaStreamTrackInterface::kEnded:
+ return RTCMediaStreamTrackStateEnded;
+ }
+}
+
++ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
+ switch (state) {
+ case RTCMediaStreamTrackStateLive:
+ return @"Live";
+ case RTCMediaStreamTrackStateEnded:
+ return @"Ended";
+ }
+}
+
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+ mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+ factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory {
+ NSParameterAssert(nativeTrack);
+ NSParameterAssert(factory);
+ if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
+ return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeAudio];
+ } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
+ return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeVideo];
+ } else {
+ return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory
+ nativeTrack:nativeTrack];
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h
new file mode 100644
index 0000000000..fddbb27c90
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMetricsSampleInfo.h"
+
+/**
+ * Enables gathering of metrics (which can be fetched with
+ * RTCGetAndResetMetrics). Must be called before any other call into WebRTC.
+ */
+RTC_EXTERN void RTCEnableMetrics(void);
+
+/** Gets and clears native histograms. */
+RTC_EXTERN NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *>* RTCGetAndResetMetrics(void);
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm
new file mode 100644
index 0000000000..87eb8c0210
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetrics.mm
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetrics.h"
+
+#import "RTCMetricsSampleInfo+Private.h"
+
+#include "rtc_base/string_utils.h"
+
+void RTCEnableMetrics(void) {
+ webrtc::metrics::Enable();
+}
+
+NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *RTCGetAndResetMetrics(void) {
+ std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>, rtc::AbslStringViewCmp>
+ histograms;
+ webrtc::metrics::GetAndReset(&histograms);
+
+ NSMutableArray *metrics =
+ [NSMutableArray arrayWithCapacity:histograms.size()];
+ for (auto const &histogram : histograms) {
+ RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric =
+ [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second];
+ [metrics addObject:metric];
+ }
+ return metrics;
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
new file mode 100644
index 0000000000..e4aa41f6c7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetricsSampleInfo.h"
+
+#include "system_wrappers/include/metrics.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+()
+
+ /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
+ - (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
new file mode 100644
index 0000000000..47a877b6fb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject
+
+/**
+ * Example of RTCMetricsSampleInfo:
+ * name: "WebRTC.Video.InputFramesPerSecond"
+ * min: 1
+ * max: 100
+ * bucketCount: 50
+ * samples: [29]:2 [30]:1
+ */
+
+/** The name of the histogram. */
+@property(nonatomic, readonly) NSString *name;
+
+/** The minimum bucket value. */
+@property(nonatomic, readonly) int min;
+
+/** The maximum bucket value. */
+@property(nonatomic, readonly) int max;
+
+/** The number of buckets. */
+@property(nonatomic, readonly) int bucketCount;
+
+/** A dictionary holding the samples <value, # of events>. */
+@property(nonatomic, readonly) NSDictionary<NSNumber *, NSNumber *> *samples;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
new file mode 100644
index 0000000000..e4be94e90a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMetricsSampleInfo+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+
+@synthesize name = _name;
+@synthesize min = _min;
+@synthesize max = _max;
+@synthesize bucketCount = _bucketCount;
+@synthesize samples = _samples;
+
+#pragma mark - Private
+
+- (instancetype)initWithNativeSampleInfo:
+ (const webrtc::metrics::SampleInfo &)info {
+ if (self = [super init]) {
+ _name = [NSString stringForStdString:info.name];
+ _min = info.min;
+ _max = info.max;
+ _bucketCount = info.bucket_count;
+
+ NSMutableDictionary *samples =
+ [NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
+ for (auto const &sample : info.samples) {
+ [samples setObject:@(sample.second) forKey:@(sample.first)];
+ }
+ _samples = samples;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
new file mode 100644
index 0000000000..cb75f061d8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCDataChannel+Private.h"
+#import "RTCDataChannelConfiguration+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
+
+ - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+ : (NSString *)label configuration
+ : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration {
+ std::string labelString = [NSString stdStringForString:label];
+ const webrtc::DataChannelInit nativeInit =
+ configuration.nativeDataChannelInit;
+ auto result = self.nativePeerConnection->CreateDataChannelOrError(labelString, &nativeInit);
+ if (!result.ok()) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory
+ nativeDataChannel:result.MoveValue()];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
new file mode 100644
index 0000000000..00f2ef7834
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+namespace webrtc {
+
+/**
+ * These objects are created by RTCPeerConnectionFactory to wrap an
+ * id<RTCPeerConnectionDelegate> and call methods on that interface.
+ */
+class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
+ public:
+ PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection);
+ ~PeerConnectionDelegateAdapter() override;
+
+ void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;
+
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+
+ void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+
+ void OnTrack(rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+
+ void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> data_channel) override;
+
+ void OnRenegotiationNeeded() override;
+
+ void OnIceConnectionChange(PeerConnectionInterface::IceConnectionState new_state) override;
+
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+
+ void OnConnectionChange(PeerConnectionInterface::PeerConnectionState new_state) override;
+
+ void OnIceGatheringChange(PeerConnectionInterface::IceGatheringState new_state) override;
+
+ void OnIceCandidate(const IceCandidateInterface *candidate) override;
+
+ void OnIceCandidateError(const std::string &address,
+ int port,
+ const std::string &url,
+ int error_code,
+ const std::string &error_text) override;
+
+ void OnIceCandidatesRemoved(const std::vector<cricket::Candidate> &candidates) override;
+
+ void OnIceSelectedCandidatePairChanged(const cricket::CandidatePairChangeEvent &event) override;
+
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) override;
+
+ void OnRemoveTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) * peer_connection_;
+};
+
+} // namespace webrtc
+@protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier);
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+()
+
+ /** The factory used to create this RTCPeerConnection */
+ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+ factory;
+
+/** The native PeerConnectionInterface created during construction. */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::PeerConnectionInterface>
+ nativePeerConnection;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * delegate.
+ */
+- (nullable instancetype)
+ initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:(nullable id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints,
+ * delegate and PeerConnectionDependencies.
+ */
+- (nullable instancetype)
+ initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate
+ NS_DESIGNATED_INITIALIZER;
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+ (RTCSignalingState)state;
+
++ (RTCSignalingState)signalingStateForNativeState:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState;
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state;
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState:
+ (RTCIceConnectionState)state;
+
++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState:
+ (RTCPeerConnectionState)state;
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+
++ (RTCPeerConnectionState)connectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState;
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
+
++ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state;
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState:
+ (RTCIceGatheringState)state;
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel:
+ (RTCStatsOutputLevel)level;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
new file mode 100644
index 0000000000..f8d38143f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCLegacyStatsReport+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCStatisticsReport+Private.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class StatsCollectorCallbackAdapter : public RTCStatsCollectorCallback {
+ public:
+ StatsCollectorCallbackAdapter(RTCStatisticsCompletionHandler completion_handler)
+ : completion_handler_(completion_handler) {}
+
+ void OnStatsDelivered(const rtc::scoped_refptr<const RTCStatsReport> &report) override {
+ RTC_DCHECK(completion_handler_);
+ RTC_OBJC_TYPE(RTCStatisticsReport) *statisticsReport =
+ [[RTC_OBJC_TYPE(RTCStatisticsReport) alloc] initWithReport:*report];
+ completion_handler_(statisticsReport);
+ completion_handler_ = nil;
+ }
+
+ private:
+ RTCStatisticsCompletionHandler completion_handler_;
+};
+
+class StatsObserverAdapter : public StatsObserver {
+ public:
+ StatsObserverAdapter(
+ void (^completionHandler)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats)) {
+ completion_handler_ = completionHandler;
+ }
+
+ ~StatsObserverAdapter() override { completion_handler_ = nil; }
+
+ void OnComplete(const StatsReports& reports) override {
+ RTC_DCHECK(completion_handler_);
+ NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
+ for (const auto* report : reports) {
+ RTC_OBJC_TYPE(RTCLegacyStatsReport) *statsReport =
+ [[RTC_OBJC_TYPE(RTCLegacyStatsReport) alloc] initWithNativeReport:*report];
+ [stats addObject:statsReport];
+ }
+ completion_handler_(stats);
+ completion_handler_ = nil;
+ }
+
+ private:
+ void (^completion_handler_)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats);
+};
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
+
+ - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
+ : (RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
+}
+
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
+}
+
+- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
+ self.nativePeerConnection->GetStats(collector.get());
+}
+
+- (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack
+ statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
+ completionHandler:
+ (void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
+ rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::StatsObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
+ [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
+ self.nativePeerConnection->GetStats(
+ observer.get(), mediaStreamTrack.nativeTrack.get(), nativeOutputLevel);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h
new file mode 100644
index 0000000000..55af6868fd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.h
@@ -0,0 +1,398 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCDataChannel);
+@class RTC_OBJC_TYPE(RTCDataChannelConfiguration);
+@class RTC_OBJC_TYPE(RTCIceCandidate);
+@class RTC_OBJC_TYPE(RTCIceCandidateErrorEvent);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCMediaStreamTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
+@class RTC_OBJC_TYPE(RTCRtpSender);
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
+@class RTC_OBJC_TYPE(RTCRtpTransceiverInit);
+@class RTC_OBJC_TYPE(RTCSessionDescription);
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
+@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
+
+typedef NS_ENUM(NSInteger, RTCRtpMediaType);
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCPeerConnectionErrorDomain;
+extern int const kRTCSessionDescriptionErrorCode;
+
+/** Represents the signaling state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCSignalingState) {
+ RTCSignalingStateStable,
+ RTCSignalingStateHaveLocalOffer,
+ RTCSignalingStateHaveLocalPrAnswer,
+ RTCSignalingStateHaveRemoteOffer,
+ RTCSignalingStateHaveRemotePrAnswer,
+ // The connection has been closed; no further negotiation will occur.
+ RTCSignalingStateClosed,
+};
+
+/** Represents the ice connection state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCIceConnectionState) {
+ RTCIceConnectionStateNew,
+ RTCIceConnectionStateChecking,
+ RTCIceConnectionStateConnected,
+ RTCIceConnectionStateCompleted,
+ RTCIceConnectionStateFailed,
+ RTCIceConnectionStateDisconnected,
+ RTCIceConnectionStateClosed,
+ RTCIceConnectionStateCount,
+};
+
+/** Represents the combined ice+dtls connection state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCPeerConnectionState) {
+ RTCPeerConnectionStateNew,
+ RTCPeerConnectionStateConnecting,
+ RTCPeerConnectionStateConnected,
+ RTCPeerConnectionStateDisconnected,
+ RTCPeerConnectionStateFailed,
+ RTCPeerConnectionStateClosed,
+};
+
+/** Represents the ice gathering state of the peer connection. */
+typedef NS_ENUM(NSInteger, RTCIceGatheringState) {
+ RTCIceGatheringStateNew,
+ RTCIceGatheringStateGathering,
+ RTCIceGatheringStateComplete,
+};
+
+/** Represents the stats output level. */
+typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
+ RTCStatsOutputLevelStandard,
+ RTCStatsOutputLevelDebug,
+};
+
+typedef void (^RTCCreateSessionDescriptionCompletionHandler)(RTC_OBJC_TYPE(RTCSessionDescription) *
+ _Nullable sdp,
+ NSError *_Nullable error);
+
+typedef void (^RTCSetSessionDescriptionCompletionHandler)(NSError *_Nullable error);
+
+@class RTC_OBJC_TYPE(RTCPeerConnection);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate)<NSObject>
+
+ /** Called when the SignalingState changed. */
+ - (void)peerConnection
+ : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState
+ : (RTCSignalingState)stateChanged;
+
+/** Called when media is received on a new stream from remote peer. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Called when a remote peer closes a stream.
+ * This is not called when RTCSdpSemanticsUnifiedPlan is specified.
+ */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Called when negotiation is needed, for example ICE has restarted. */
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection;
+
+/** Called any time the IceConnectionState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceConnectionState:(RTCIceConnectionState)newState;
+
+/** Called any time the IceGatheringState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeIceGatheringState:(RTCIceGatheringState)newState;
+
+/** New ice candidate has been found. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
+
+/** Called when a group of local Ice candidates have been removed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+/** New data channel has been opened. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
+
+/** Called when signaling indicates a transceiver will be receiving media from
+ * the remote endpoint (see peerConnection:didStartReceivingOnTransceiver: below).
+ * This is only called with RTCSdpSemanticsUnifiedPlan specified.
+ */
+@optional
+/** Called any time the IceConnectionState changes following standardized
+ * transition. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState;
+
+/** Called any time the PeerConnectionState changes. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeConnectionState:(RTCPeerConnectionState)newState;
+
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver;
+
+/** Called when a receiver and its track are created. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didAddReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver
+ streams:(NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)mediaStreams;
+
+/** Called when the receiver and its track are removed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didRemoveReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver;
+
+/** Called when the selected ICE candidate pair is changed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+ remoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
+ lastReceivedMs:(int)lastDataReceivedMs
+ changeReason:(NSString *)reason;
+
+/** Called when gathering of an ICE candidate failed. */
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+ didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnection) : NSObject
+
+/** The object that will be notified about events such as state changes and
+ * streams being added or removed.
+ */
+@property(nonatomic, weak, nullable) id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)> delegate;
+/** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use
+ * `senders` instead.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *localStreams;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription;
+@property(nonatomic, readonly) RTCSignalingState signalingState;
+@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState;
+@property(nonatomic, readonly) RTCPeerConnectionState connectionState;
+@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState;
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration;
+
+/** Gets all RTCRtpSenders associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpSender.
+ * Use isEqual: instead of == to compare RTCRtpSender instances.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *senders;
+
+/** Gets all RTCRtpReceivers associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpReceiver.
+ * Use isEqual: instead of == to compare RTCRtpReceiver instances.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers;
+
+/** Gets all RTCRtpTransceivers associated with this peer connection.
+ * Note: reading this property returns different instances of
+ * RTCRtpTransceiver. Use isEqual: instead of == to compare
+ * RTCRtpTransceiver instances. This is only available with
+ * RTCSdpSemanticsUnifiedPlan specified.
+ */
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *transceivers;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Sets the PeerConnection's global configuration to `configuration`.
+ * Any changes to STUN/TURN servers or ICE candidate policy will affect the
+ * next gathering phase, and cause the next call to createOffer to generate
+ * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
+ * cannot be changed with this method.
+ */
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration;
+
+/** Terminate all media and close the transport. */
+- (void)close;
+
+/** Provide a remote candidate to the ICE Agent. */
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ DEPRECATED_MSG_ATTRIBUTE("Please use addIceCandidate:completionHandler: instead");
+
+/** Provide a remote candidate to the ICE Agent. */
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ completionHandler:(void (^)(NSError *_Nullable error))completionHandler;
+
+/** Remove a group of remote candidates from the ICE Agent. */
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
+
+/** Add a new media stream to be sent on this peer connection.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * addTrack instead.
+ */
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Remove the given media stream from this peer connection.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * removeTrack instead.
+ */
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
+
+/** Add a new media stream track to be sent on this peer connection, and return
+ * the newly created RTCRtpSender. The RTCRtpSender will be
+ * associated with the streams specified in the `streamIds` list.
+ *
+ * Errors: If an error occurs, returns nil. An error can occur if:
+ * - A sender already exists for the track.
+ * - The peer connection is closed.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ streamIds:(NSArray<NSString *> *)streamIds;
+
+/** With PlanB semantics, removes an RTCRtpSender from this peer connection.
+ *
+ * With UnifiedPlan semantics, sets sender's track to null and removes the
+ * send component from the associated RTCRtpTransceiver's direction.
+ *
+ * Returns YES on success.
+ */
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender;
+
+/** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of
+ * transceivers. Adding a transceiver will cause future calls to CreateOffer
+ * to add a media description for the corresponding transceiver.
+ *
+ * The initial value of `mid` in the returned transceiver is nil. Setting a
+ * new session description may change it to a non-nil value.
+ *
+ * https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
+ *
+ * Optionally, an RtpTransceiverInit structure can be specified to configure
+ * the transceiver from construction. If not specified, the transceiver will
+ * default to having a direction of kSendRecv and not be part of any streams.
+ *
+ * These methods are only available when Unified Plan is enabled (see
+ * RTCConfiguration).
+ */
+
+/** Adds a transceiver with a sender set to transmit the given track. The kind
+ * of the transceiver (and sender/receiver) will be derived from the kind of
+ * the track.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init;
+
+/** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio
+ * or RTCRtpMediaTypeVideo.
+ */
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType;
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverOfType:(RTCRtpMediaType)mediaType
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init;
+
+/** Tells the PeerConnection that ICE should be restarted. This triggers a need
+ * for negotiation and subsequent offerForConstraints:completionHandler call will act as if
+ * RTCOfferAnswerOptions::ice_restart is true.
+ */
+- (void)restartIce;
+
+/** Generate an SDP offer. */
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler;
+
+/** Generate an SDP answer. */
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler;
+
+/** Apply the supplied RTCSessionDescription as the local description. */
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Creates an offer or answer (depending on current signaling state) and sets
+ * it as the local session description. */
+- (void)setLocalDescriptionWithCompletionHandler:
+ (RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Apply the supplied RTCSessionDescription as the remote description. */
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler;
+
+/** Limits the bandwidth allocated for all RTP streams sent by this
+ * PeerConnection. Nil parameters will be unchanged. Setting
+ * `currentBitrateBps` will force the available bitrate estimate to the given
+ * value. Returns YES if the parameters were successfully updated.
+ */
+- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
+ currentBitrateBps:(nullable NSNumber *)currentBitrateBps
+ maxBitrateBps:(nullable NSNumber *)maxBitrateBps;
+
+/** Start or stop recording an Rtc EventLog. */
+- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes;
+- (void)stopRtcEventLog;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Media)
+
+ /** Create an RTCRtpSender with the specified kind and media stream ID.
+ * See RTCMediaStreamTrack.h for available kinds.
+ * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+ * addTransceiver instead.
+ */
+ - (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind : (NSString *)kind streamId
+ : (NSString *)streamId;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
+
+ /** Create a new data channel with the given label and configuration. */
+ - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+ : (NSString *)label configuration : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration;
+
+@end
+
+typedef void (^RTCStatisticsCompletionHandler)(RTC_OBJC_TYPE(RTCStatisticsReport) *);
+
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
+
+ /** Gather stats for the given RTCMediaStreamTrack. If `mediaStreamTrack` is nil,
+ * statistics are gathered for all tracks.
+ */
+ - (void)statsForTrack
+ : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel
+ : (RTCStatsOutputLevel)statsOutputLevel completionHandler
+ : (nullable void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler;
+
+/** Gathers statistics through the v2 statistics API. */
+- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+/** Spec-compliant getStats() performing the stats selection algorithm with the
+ * sender.
+ */
+- (void)statisticsForSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+/** Spec-compliant getStats() performing the stats selection algorithm with the
+ * receiver.
+ */
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
+ completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
new file mode 100644
index 0000000000..f4db472380
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -0,0 +1,939 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnection+Private.h"
+
+#import "RTCConfiguration+Private.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCIceCandidate+Private.h"
+#import "RTCIceCandidateErrorEvent+Private.h"
+#import "RTCLegacyStatsReport+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCRtpTransceiver+Private.h"
+#import "RTCSessionDescription+Private.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include <memory>
+
+#include "api/jsep_ice_candidate.h"
+#include "api/rtc_event_log_output_file.h"
+#include "api/set_local_description_observer_interface.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "sdk/objc/native/api/ssl_certificate_verifier.h"
+
+NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)";
+int const kRTCPeerConnnectionSessionDescriptionError = -1;
+
+namespace {
+
+class SetSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface,
+ public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ SetSessionDescriptionObserver(RTCSetSessionDescriptionCompletionHandler completionHandler) {
+ completion_handler_ = completionHandler;
+ }
+
+ virtual void OnSetLocalDescriptionComplete(webrtc::RTCError error) override {
+ OnCompelete(error);
+ }
+
+ virtual void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
+ OnCompelete(error);
+ }
+
+ private:
+ void OnCompelete(webrtc::RTCError error) {
+ RTC_DCHECK(completion_handler_ != nil);
+ if (error.ok()) {
+ completion_handler_(nil);
+ } else {
+ // TODO(hta): Add handling of error.type()
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:kRTCPeerConnnectionSessionDescriptionError
+ userInfo:@{NSLocalizedDescriptionKey : str}];
+ completion_handler_(err);
+ }
+ completion_handler_ = nil;
+ }
+ RTCSetSessionDescriptionCompletionHandler completion_handler_;
+};
+
+} // anonymous namespace
+
+namespace webrtc {
+
+class CreateSessionDescriptionObserverAdapter
+ : public CreateSessionDescriptionObserver {
+ public:
+ CreateSessionDescriptionObserverAdapter(void (^completionHandler)(
+ RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, NSError *error)) {
+ completion_handler_ = completionHandler;
+ }
+
+ ~CreateSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
+
+ void OnSuccess(SessionDescriptionInterface *desc) override {
+ RTC_DCHECK(completion_handler_);
+ std::unique_ptr<webrtc::SessionDescriptionInterface> description =
+ std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
+ RTC_OBJC_TYPE(RTCSessionDescription) *session =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description.get()];
+ completion_handler_(session, nil);
+ completion_handler_ = nil;
+ }
+
+ void OnFailure(RTCError error) override {
+ RTC_DCHECK(completion_handler_);
+ // TODO(hta): Add handling of error.type()
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError* err =
+ [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:kRTCPeerConnnectionSessionDescriptionError
+ userInfo:@{ NSLocalizedDescriptionKey : str }];
+ completion_handler_(nil, err);
+ completion_handler_ = nil;
+ }
+
+ private:
+ void (^completion_handler_)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription,
+ NSError *error);
+};
+
+PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) *
+ peerConnection) {
+ peer_connection_ = peerConnection;
+}
+
+PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
+ peer_connection_ = nil;
+}
+
+void PeerConnectionDelegateAdapter::OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ RTCSignalingState state =
+ [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeSignalingState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnAddStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:stream];
+ [peer_connection.delegate peerConnection:peer_connection
+ didAddStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:stream];
+
+ [peer_connection.delegate peerConnection:peer_connection
+ didRemoveStream:mediaStream];
+}
+
+void PeerConnectionDelegateAdapter::OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> nativeTransceiver) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpTransceiver:nativeTransceiver];
+ if ([peer_connection.delegate
+ respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) {
+ [peer_connection.delegate peerConnection:peer_connection
+ didStartReceivingOnTransceiver:transceiver];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCDataChannel) *dataChannel =
+ [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:peer_connection.factory
+ nativeDataChannel:data_channel];
+ [peer_connection.delegate peerConnection:peer_connection
+ didOpenDataChannel:dataChannel];
+}
+
+void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
+}
+
+void PeerConnectionDelegateAdapter::OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ RTCIceConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ if ([peer_connection_.delegate
+ respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) {
+ RTCIceConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_
+ didChangeStandardizedIceConnectionState:state];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) {
+ if ([peer_connection_.delegate
+ respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) {
+ RTCPeerConnectionState state =
+ [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state];
+ [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ RTCIceGatheringState state =
+ [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeIceGatheringState:state];
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidate(
+ const IceCandidateInterface *candidate) {
+ RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didGenerateIceCandidate:iceCandidate];
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidateError(const std::string &address,
+ int port,
+ const std::string &url,
+ int error_code,
+ const std::string &error_text) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *event =
+ [[RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) alloc] initWithAddress:address
+ port:port
+ url:url
+ errorCode:error_code
+ errorText:error_text];
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:
+ didFailToGatherIceCandidate:)]) {
+ [peer_connection.delegate peerConnection:peer_connection didFailToGatherIceCandidate:event];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) {
+ NSMutableArray* ice_candidates =
+ [NSMutableArray arrayWithCapacity:candidates.size()];
+ for (const auto& candidate : candidates) {
+ std::unique_ptr<JsepIceCandidate> candidate_wrapper(
+ new JsepIceCandidate(candidate.transport_name(), -1, candidate));
+ RTC_OBJC_TYPE(RTCIceCandidate) *ice_candidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate_wrapper.get()];
+ [ice_candidates addObject:ice_candidate];
+ }
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ [peer_connection.delegate peerConnection:peer_connection
+ didRemoveIceCandidates:ice_candidates];
+}
+
+void PeerConnectionDelegateAdapter::OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent &event) {
+ const auto &selected_pair = event.selected_candidate_pair;
+ auto local_candidate_wrapper = std::make_unique<JsepIceCandidate>(
+ selected_pair.local_candidate().transport_name(), -1, selected_pair.local_candidate());
+ RTC_OBJC_TYPE(RTCIceCandidate) *local_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+ initWithNativeCandidate:local_candidate_wrapper.release()];
+ auto remote_candidate_wrapper = std::make_unique<JsepIceCandidate>(
+ selected_pair.remote_candidate().transport_name(), -1, selected_pair.remote_candidate());
+ RTC_OBJC_TYPE(RTCIceCandidate) *remote_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+ initWithNativeCandidate:remote_candidate_wrapper.release()];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ NSString *nsstr_reason = [NSString stringForStdString:event.reason];
+ if ([peer_connection.delegate
+ respondsToSelector:@selector
+ (peerConnection:didChangeLocalCandidate:remoteCandidate:lastReceivedMs:changeReason:)]) {
+ [peer_connection.delegate peerConnection:peer_connection
+ didChangeLocalCandidate:local_candidate
+ remoteCandidate:remote_candidate
+ lastReceivedMs:event.last_data_received_ms
+ changeReason:nsstr_reason];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:
+ didAddReceiver:streams:)]) {
+ NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()];
+ for (const auto &nativeStream : streams) {
+ RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+ [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+ nativeMediaStream:nativeStream];
+ [mediaStreams addObject:mediaStream];
+ }
+ RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpReceiver:receiver];
+
+ [peer_connection.delegate peerConnection:peer_connection
+ didAddReceiver:rtpReceiver
+ streams:mediaStreams];
+ }
+}
+
+void PeerConnectionDelegateAdapter::OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+ if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) {
+ RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+ nativeRtpReceiver:receiver];
+ [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver];
+ }
+}
+
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnection) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ NSMutableArray<RTC_OBJC_TYPE(RTCMediaStream) *> *_localStreams;
+ std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
+ std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
+ BOOL _hasStartedRtcEventLog;
+}
+
+@synthesize delegate = _delegate;
+@synthesize factory = _factory;
+
+- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (nullable id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ NSParameterAssert(factory);
+ std::unique_ptr<webrtc::PeerConnectionDependencies> dependencies =
+ std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
+ if (certificateVerifier != nil) {
+ dependencies->tls_cert_verifier = webrtc::ObjCToNativeCertificateVerifier(certificateVerifier);
+ }
+ return [self initWithDependencies:factory
+ configuration:configuration
+ constraints:constraints
+ dependencies:std::move(dependencies)
+ delegate:delegate];
+}
+
+- (nullable instancetype)
+ initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ NSParameterAssert(factory);
+ NSParameterAssert(dependencies.get());
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return nil;
+ }
+ if (self = [super init]) {
+ _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
+ _nativeConstraints = constraints.nativeConstraints;
+ CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(), config.get());
+
+ webrtc::PeerConnectionDependencies deps = std::move(*dependencies.release());
+ deps.observer = _observer.get();
+ auto result = factory.nativeFactory->CreatePeerConnectionOrError(*config, std::move(deps));
+
+ if (!result.ok()) {
+ return nil;
+ }
+ _peerConnection = result.MoveValue();
+ _factory = factory;
+ _localStreams = [[NSMutableArray alloc] init];
+ _delegate = delegate;
+ }
+ return self;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)localStreams {
+ return [_localStreams copy];
+}
+
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)localDescription {
+ // It's only safe to operate on SessionDescriptionInterface on the signaling thread.
+ return _peerConnection->signaling_thread()->Invoke<RTC_OBJC_TYPE(RTCSessionDescription) *>(
+ RTC_FROM_HERE, [self] {
+ const webrtc::SessionDescriptionInterface *description =
+ _peerConnection->local_description();
+ return description ?
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+ nil;
+ });
+}
+
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)remoteDescription {
+ // It's only safe to operate on SessionDescriptionInterface on the signaling thread.
+ return _peerConnection->signaling_thread()->Invoke<RTC_OBJC_TYPE(RTCSessionDescription) *>(
+ RTC_FROM_HERE, [self] {
+ const webrtc::SessionDescriptionInterface *description =
+ _peerConnection->remote_description();
+ return description ?
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+ nil;
+ });
+}
+
+- (RTCSignalingState)signalingState {
+ return [[self class]
+ signalingStateForNativeState:_peerConnection->signaling_state()];
+}
+
+- (RTCIceConnectionState)iceConnectionState {
+ return [[self class] iceConnectionStateForNativeState:
+ _peerConnection->ice_connection_state()];
+}
+
+- (RTCPeerConnectionState)connectionState {
+ return [[self class] connectionStateForNativeState:_peerConnection->peer_connection_state()];
+}
+
+- (RTCIceGatheringState)iceGatheringState {
+ return [[self class] iceGatheringStateForNativeState:
+ _peerConnection->ice_gathering_state()];
+}
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return NO;
+ }
+ CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
+ config.get());
+ return _peerConnection->SetConfiguration(*config).ok();
+}
+
+- (RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
+ webrtc::PeerConnectionInterface::RTCConfiguration config =
+ _peerConnection->GetConfiguration();
+ return [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:config];
+}
+
+- (void)close {
+ _peerConnection->Close();
+}
+
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
+ std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
+ candidate.nativeCandidate);
+ _peerConnection->AddIceCandidate(iceCandidate.get());
+}
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate
+ completionHandler:(void (^)(NSError *_Nullable error))completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ _peerConnection->AddIceCandidate(
+ candidate.nativeCandidate, [completionHandler](const auto &error) {
+ if (error.ok()) {
+ completionHandler(nil);
+ } else {
+ NSString *str = [NSString stringForStdString:error.message()];
+ NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain
+ code:static_cast<NSInteger>(error.type())
+ userInfo:@{NSLocalizedDescriptionKey : str}];
+ completionHandler(err);
+ }
+ });
+}
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)iceCandidates {
+ std::vector<cricket::Candidate> candidates;
+ for (RTC_OBJC_TYPE(RTCIceCandidate) * iceCandidate in iceCandidates) {
+ std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
+ iceCandidate.nativeCandidate);
+ if (candidate) {
+ candidates.push_back(candidate->candidate());
+ // Need to fill the transport name from the sdp_mid.
+ candidates.back().set_transport_name(candidate->sdp_mid());
+ }
+ }
+ if (!candidates.empty()) {
+ _peerConnection->RemoveIceCandidates(candidates);
+ }
+}
+
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ if (!_peerConnection->AddStream(stream.nativeMediaStream.get())) {
+ RTCLogError(@"Failed to add stream: %@", stream);
+ return;
+ }
+ [_localStreams addObject:stream];
+}
+
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
+ _peerConnection->RemoveStream(stream.nativeMediaStream.get());
+ [_localStreams removeObject:stream];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ streamIds:(NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds;
+ for (NSString *streamId in streamIds) {
+ nativeStreamIds.push_back([streamId UTF8String]);
+ }
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenderOrError =
+ _peerConnection->AddTrack(track.nativeTrack, nativeStreamIds);
+ if (!nativeSenderOrError.ok()) {
+ RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSenderOrError.MoveValue()];
+}
+
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
+ bool result = _peerConnection->RemoveTrackOrError(sender.nativeRtpSender).ok();
+ if (!result) {
+ RTCLogError(@"Failed to remote track %@", sender);
+ }
+ return result;
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ return [self addTransceiverWithTrack:track
+ init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init {
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
+ _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit);
+ if (!nativeTransceiverOrError.ok()) {
+ RTCLogError(
+ @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+ initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
+ return [self addTransceiverOfType:mediaType
+ init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+ addTransceiverOfType:(RTCRtpMediaType)mediaType
+ init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init {
+ webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
+ _peerConnection->AddTransceiver(
+ [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType], init.nativeInit);
+ if (!nativeTransceiverOrError.ok()) {
+ RTCLogError(@"Failed to add transceiver %@: %s",
+ [RTC_OBJC_TYPE(RTCRtpReceiver) stringForMediaType:mediaType],
+ nativeTransceiverOrError.error().message());
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+ initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+}
+
+- (void)restartIce {
+ _peerConnection->RestartIce();
+}
+
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
+
+ _peerConnection->CreateOffer(observer.get(), options);
+}
+
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
+
+ _peerConnection->CreateAnswer(observer.get(), options);
+}
+
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
+}
+
+- (void)setLocalDescriptionWithCompletionHandler:
+ (RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetLocalDescription(observer);
+}
+
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
+ completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
+ RTC_DCHECK(completionHandler != nil);
+ rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
+ _peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
+}
+
+- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
+ currentBitrateBps:(nullable NSNumber *)currentBitrateBps
+ maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
+ webrtc::BitrateSettings params;
+ if (minBitrateBps != nil) {
+ params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
+ }
+ if (currentBitrateBps != nil) {
+ params.start_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
+ }
+ if (maxBitrateBps != nil) {
+ params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
+ }
+ return _peerConnection->SetBitrate(params).ok();
+}
+
+- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
+ maxSizeInBytes:(int64_t)maxSizeInBytes {
+ RTC_DCHECK(filePath.length);
+ RTC_DCHECK_GT(maxSizeInBytes, 0);
+ RTC_DCHECK(!_hasStartedRtcEventLog);
+ if (_hasStartedRtcEventLog) {
+ RTCLogError(@"Event logging already started.");
+ return NO;
+ }
+ FILE *f = fopen(filePath.UTF8String, "wb");
+ if (!f) {
+ RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
+ return NO;
+ }
+ // TODO(eladalon): It would be better to not allow negative values into PC.
+ const size_t max_size = (maxSizeInBytes < 0) ? webrtc::RtcEventLog::kUnlimitedOutput :
+ rtc::saturated_cast<size_t>(maxSizeInBytes);
+
+ _hasStartedRtcEventLog = _peerConnection->StartRtcEventLog(
+ std::make_unique<webrtc::RtcEventLogOutputFile>(f, max_size));
+ return _hasStartedRtcEventLog;
+}
+
+- (void)stopRtcEventLog {
+ _peerConnection->StopRtcEventLog();
+ _hasStartedRtcEventLog = NO;
+}
+
+- (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId {
+ std::string nativeKind = [NSString stdStringForString:kind];
+ std::string nativeStreamId = [NSString stdStringForString:streamId];
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
+ _peerConnection->CreateSender(nativeKind, nativeStreamId));
+ return nativeSender ? [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSender] :
+ nil;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *)senders {
+ std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
+ _peerConnection->GetSenders());
+ NSMutableArray *senders = [[NSMutableArray alloc] init];
+ for (const auto &nativeSender : nativeSenders) {
+ RTC_OBJC_TYPE(RTCRtpSender) *sender =
+ [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+ nativeRtpSender:nativeSender];
+ [senders addObject:sender];
+ }
+ return senders;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *)receivers {
+ std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
+ _peerConnection->GetReceivers());
+ NSMutableArray *receivers = [[NSMutableArray alloc] init];
+ for (const auto &nativeReceiver : nativeReceivers) {
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver =
+ [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:self.factory
+ nativeRtpReceiver:nativeReceiver];
+ [receivers addObject:receiver];
+ }
+ return receivers;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *)transceivers {
+ std::vector<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceivers(
+ _peerConnection->GetTransceivers());
+ NSMutableArray *transceivers = [[NSMutableArray alloc] init];
+ for (const auto &nativeTransceiver : nativeTransceivers) {
+ RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:self.factory
+ nativeRtpTransceiver:nativeTransceiver];
+ [transceivers addObject:transceiver];
+ }
+ return transceivers;
+}
+
+#pragma mark - Private
+
++ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
+ (RTCSignalingState)state {
+ switch (state) {
+ case RTCSignalingStateStable:
+ return webrtc::PeerConnectionInterface::kStable;
+ case RTCSignalingStateHaveLocalOffer:
+ return webrtc::PeerConnectionInterface::kHaveLocalOffer;
+ case RTCSignalingStateHaveLocalPrAnswer:
+ return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
+ case RTCSignalingStateHaveRemoteOffer:
+ return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
+ case RTCSignalingStateHaveRemotePrAnswer:
+ return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
+ case RTCSignalingStateClosed:
+ return webrtc::PeerConnectionInterface::kClosed;
+ }
+}
+
++ (RTCSignalingState)signalingStateForNativeState:
+ (webrtc::PeerConnectionInterface::SignalingState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kStable:
+ return RTCSignalingStateStable;
+ case webrtc::PeerConnectionInterface::kHaveLocalOffer:
+ return RTCSignalingStateHaveLocalOffer;
+ case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
+ return RTCSignalingStateHaveLocalPrAnswer;
+ case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
+ return RTCSignalingStateHaveRemoteOffer;
+ case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
+ return RTCSignalingStateHaveRemotePrAnswer;
+ case webrtc::PeerConnectionInterface::kClosed:
+ return RTCSignalingStateClosed;
+ }
+}
+
++ (NSString *)stringForSignalingState:(RTCSignalingState)state {
+ switch (state) {
+ case RTCSignalingStateStable:
+ return @"STABLE";
+ case RTCSignalingStateHaveLocalOffer:
+ return @"HAVE_LOCAL_OFFER";
+ case RTCSignalingStateHaveLocalPrAnswer:
+ return @"HAVE_LOCAL_PRANSWER";
+ case RTCSignalingStateHaveRemoteOffer:
+ return @"HAVE_REMOTE_OFFER";
+ case RTCSignalingStateHaveRemotePrAnswer:
+ return @"HAVE_REMOTE_PRANSWER";
+ case RTCSignalingStateClosed:
+ return @"CLOSED";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState:
+ (RTCPeerConnectionState)state {
+ switch (state) {
+ case RTCPeerConnectionStateNew:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kNew;
+ case RTCPeerConnectionStateConnecting:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting;
+ case RTCPeerConnectionStateConnected:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kConnected;
+ case RTCPeerConnectionStateFailed:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kFailed;
+ case RTCPeerConnectionStateDisconnected:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected;
+ case RTCPeerConnectionStateClosed:
+ return webrtc::PeerConnectionInterface::PeerConnectionState::kClosed;
+ }
+}
+
++ (RTCPeerConnectionState)connectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kNew:
+ return RTCPeerConnectionStateNew;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting:
+ return RTCPeerConnectionStateConnecting;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected:
+ return RTCPeerConnectionStateConnected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed:
+ return RTCPeerConnectionStateFailed;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
+ return RTCPeerConnectionStateDisconnected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed:
+ return RTCPeerConnectionStateClosed;
+ }
+}
+
++ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state {
+ switch (state) {
+ case RTCPeerConnectionStateNew:
+ return @"NEW";
+ case RTCPeerConnectionStateConnecting:
+ return @"CONNECTING";
+ case RTCPeerConnectionStateConnected:
+ return @"CONNECTED";
+ case RTCPeerConnectionStateFailed:
+ return @"FAILED";
+ case RTCPeerConnectionStateDisconnected:
+ return @"DISCONNECTED";
+ case RTCPeerConnectionStateClosed:
+ return @"CLOSED";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceConnectionState)
+ nativeIceConnectionStateForState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ return webrtc::PeerConnectionInterface::kIceConnectionNew;
+ case RTCIceConnectionStateChecking:
+ return webrtc::PeerConnectionInterface::kIceConnectionChecking;
+ case RTCIceConnectionStateConnected:
+ return webrtc::PeerConnectionInterface::kIceConnectionConnected;
+ case RTCIceConnectionStateCompleted:
+ return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
+ case RTCIceConnectionStateFailed:
+ return webrtc::PeerConnectionInterface::kIceConnectionFailed;
+ case RTCIceConnectionStateDisconnected:
+ return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
+ case RTCIceConnectionStateClosed:
+ return webrtc::PeerConnectionInterface::kIceConnectionClosed;
+ case RTCIceConnectionStateCount:
+ return webrtc::PeerConnectionInterface::kIceConnectionMax;
+ }
+}
+
++ (RTCIceConnectionState)iceConnectionStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceConnectionNew:
+ return RTCIceConnectionStateNew;
+ case webrtc::PeerConnectionInterface::kIceConnectionChecking:
+ return RTCIceConnectionStateChecking;
+ case webrtc::PeerConnectionInterface::kIceConnectionConnected:
+ return RTCIceConnectionStateConnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
+ return RTCIceConnectionStateCompleted;
+ case webrtc::PeerConnectionInterface::kIceConnectionFailed:
+ return RTCIceConnectionStateFailed;
+ case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
+ return RTCIceConnectionStateDisconnected;
+ case webrtc::PeerConnectionInterface::kIceConnectionClosed:
+ return RTCIceConnectionStateClosed;
+ case webrtc::PeerConnectionInterface::kIceConnectionMax:
+ return RTCIceConnectionStateCount;
+ }
+}
+
++ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
+ switch (state) {
+ case RTCIceConnectionStateNew:
+ return @"NEW";
+ case RTCIceConnectionStateChecking:
+ return @"CHECKING";
+ case RTCIceConnectionStateConnected:
+ return @"CONNECTED";
+ case RTCIceConnectionStateCompleted:
+ return @"COMPLETED";
+ case RTCIceConnectionStateFailed:
+ return @"FAILED";
+ case RTCIceConnectionStateDisconnected:
+ return @"DISCONNECTED";
+ case RTCIceConnectionStateClosed:
+ return @"CLOSED";
+ case RTCIceConnectionStateCount:
+ return @"COUNT";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceGatheringState)
+ nativeIceGatheringStateForState:(RTCIceGatheringState)state {
+ switch (state) {
+ case RTCIceGatheringStateNew:
+ return webrtc::PeerConnectionInterface::kIceGatheringNew;
+ case RTCIceGatheringStateGathering:
+ return webrtc::PeerConnectionInterface::kIceGatheringGathering;
+ case RTCIceGatheringStateComplete:
+ return webrtc::PeerConnectionInterface::kIceGatheringComplete;
+ }
+}
+
++ (RTCIceGatheringState)iceGatheringStateForNativeState:
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
+ switch (nativeState) {
+ case webrtc::PeerConnectionInterface::kIceGatheringNew:
+ return RTCIceGatheringStateNew;
+ case webrtc::PeerConnectionInterface::kIceGatheringGathering:
+ return RTCIceGatheringStateGathering;
+ case webrtc::PeerConnectionInterface::kIceGatheringComplete:
+ return RTCIceGatheringStateComplete;
+ }
+}
+
++ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
+ switch (state) {
+ case RTCIceGatheringStateNew:
+ return @"NEW";
+ case RTCIceGatheringStateGathering:
+ return @"GATHERING";
+ case RTCIceGatheringStateComplete:
+ return @"COMPLETE";
+ }
+}
+
++ (webrtc::PeerConnectionInterface::StatsOutputLevel)
+ nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
+ switch (level) {
+ case RTCStatsOutputLevelStandard:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
+ case RTCStatsOutputLevelDebug:
+ return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
+ }
+}
+
+- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
+ return _peerConnection;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
new file mode 100644
index 0000000000..f361b9f0ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class AudioEncoderFactory;
+class AudioDecoderFactory;
+class NetworkControllerFactoryInterface;
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+class AudioProcessing;
+struct PeerConnectionDependencies;
+
+} // namespace webrtc
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
+
+ - (instancetype)initNative NS_DESIGNATED_INITIALIZER;
+
+/* Initializer used when WebRTC is compiled with no media support */
+- (instancetype)initWithNoMedia;
+
+/* Initialize object with injectable native audio/video encoder/decoder factories */
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:
+ (nullable webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
+
+- (instancetype)
+ initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
+ networkControllerFactory:(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
+ networkControllerFactory;
+
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * dependencies.
+ */
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
new file mode 100644
index 0000000000..9613646270
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
+
+ /**
+ * PeerConnectionFactoryInterface created and held by this
+ * RTCPeerConnectionFactory object. This is needed to pass to the underlying
+ * C++ APIs.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
+
+@property(nonatomic, readonly) rtc::Thread* signalingThread;
+@property(nonatomic, readonly) rtc::Thread* workerThread;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
new file mode 100644
index 0000000000..88aac990f2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSource);
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCPeerConnection);
+@class RTC_OBJC_TYPE(RTCVideoSource);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions);
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate);
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
+@protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject
+
+/* Initialize object with default H264 video encoder/decoder factories */
+- (instancetype)init;
+
+/* Initialize object with injectable video encoder/decoder factories */
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
+
+/** Initialize an RTCAudioSource with constraints. */
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+ (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints;
+
+/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source
+ * with no constraints.
+ */
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId;
+
+/** Initialize an RTCAudioTrack with a source and an id. */
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId;
+
+/** Initialize a generic RTCVideoSource. The RTCVideoSource should be
+ * passed to a RTCVideoCapturer implementation, e.g.
+ * RTCCameraVideoCapturer, in order to produce frames.
+ */
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource;
+
+/** Initialize a generic RTCVideoSource with he posibility of marking
+ * it as usable for screen sharing. The RTCVideoSource should be
+ * passed to a RTCVideoCapturer implementation, e.g.
+ * RTCCameraVideoCapturer, in order to produce frames.
+ */
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast;
+
+/** Initialize an RTCVideoTrack with a source and an id. */
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId;
+
+/** Initialize an RTCMediaStream with an id. */
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId;
+
+/** Initialize an RTCPeerConnection with a configuration, constraints, and
+ * delegate.
+ */
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
+
+/** Set the options to be used for subsequently created RTCPeerConnections */
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options;
+
+/** Start an AecDump recording. This API call will likely change in the future. */
+- (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes;
+
+/* Stop an active AecDump recording */
+- (void)stopAecDump;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
new file mode 100644
index 0000000000..84c5f020b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
@@ -0,0 +1,322 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#import "RTCPeerConnectionFactory+Native.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCPeerConnectionFactoryOptions+Private.h"
+
+#import "RTCAudioSource+Private.h"
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnection+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "RTCVideoTrack+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoDecoderFactory.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "helpers/NSString+StdString.h"
+#include "rtc_base/checks.h"
+#include "sdk/objc/native/api/network_monitor_factory.h"
+#include "sdk/objc/native/api/ssl_certificate_verifier.h"
+#include "system_wrappers/include/field_trial.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#import "components/video_codec/RTCVideoDecoderFactoryH264.h"
+#import "components/video_codec/RTCVideoEncoderFactoryH264.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) {
+ std::unique_ptr<rtc::Thread> _networkThread;
+ std::unique_ptr<rtc::Thread> _workerThread;
+ std::unique_ptr<rtc::Thread> _signalingThread;
+ BOOL _hasStartedAecDump;
+}
+
+@synthesize nativeFactory = _nativeFactory;
+
+- (rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
+#if defined(WEBRTC_IOS)
+ return webrtc::CreateAudioDeviceModule();
+#else
+ return nullptr;
+#endif
+}
+
+- (instancetype)init {
+ return [self
+ initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
+ nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
+ nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(
+ RTCVideoEncoderFactoryH264) alloc] init])
+ nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(
+ RTCVideoDecoderFactoryH264) alloc] init])
+ audioDeviceModule:[self audioDeviceModule].get()
+ audioProcessingModule:nullptr];
+}
+
+- (instancetype)
+ initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
+ std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory;
+ std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory;
+ if (encoderFactory) {
+ native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory);
+ }
+ if (decoderFactory) {
+ native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory);
+ }
+ return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
+ nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
+ nativeVideoEncoderFactory:std::move(native_encoder_factory)
+ nativeVideoDecoderFactory:std::move(native_decoder_factory)
+ audioDeviceModule:[self audioDeviceModule].get()
+ audioProcessingModule:nullptr];
+}
+- (instancetype)initNative {
+ if (self = [super init]) {
+ _networkThread = rtc::Thread::CreateWithSocketServer();
+ _networkThread->SetName("network_thread", _networkThread.get());
+ BOOL result = _networkThread->Start();
+ RTC_DCHECK(result) << "Failed to start network thread.";
+
+ _workerThread = rtc::Thread::Create();
+ _workerThread->SetName("worker_thread", _workerThread.get());
+ result = _workerThread->Start();
+ RTC_DCHECK(result) << "Failed to start worker thread.";
+
+ _signalingThread = rtc::Thread::Create();
+ _signalingThread->SetName("signaling_thread", _signalingThread.get());
+ result = _signalingThread->Start();
+ RTC_DCHECK(result) << "Failed to start signaling thread.";
+ }
+ return self;
+}
+
+- (instancetype)initWithNoMedia {
+ if (self = [self initNative]) {
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = _networkThread.get();
+ dependencies.worker_thread = _workerThread.get();
+ dependencies.signaling_thread = _signalingThread.get();
+ if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) {
+ dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory();
+ }
+ _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+ }
+ return self;
+}
+
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
+ return [self initWithNativeAudioEncoderFactory:audioEncoderFactory
+ nativeAudioDecoderFactory:audioDecoderFactory
+ nativeVideoEncoderFactory:std::move(videoEncoderFactory)
+ nativeVideoDecoderFactory:std::move(videoDecoderFactory)
+ audioDeviceModule:audioDeviceModule
+ audioProcessingModule:audioProcessingModule
+ networkControllerFactory:nullptr];
+}
+- (instancetype)initWithNativeAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
+ nativeAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
+ nativeVideoEncoderFactory:
+ (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
+ nativeVideoDecoderFactory:
+ (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
+ audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
+ audioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
+ networkControllerFactory:
+ (std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
+ networkControllerFactory {
+ if (self = [self initNative]) {
+ webrtc::PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = _networkThread.get();
+ dependencies.worker_thread = _workerThread.get();
+ dependencies.signaling_thread = _signalingThread.get();
+ if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) {
+ dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory();
+ }
+ dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
+ dependencies.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.adm = std::move(audioDeviceModule);
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ media_deps.audio_encoder_factory = std::move(audioEncoderFactory);
+ media_deps.audio_decoder_factory = std::move(audioDecoderFactory);
+ media_deps.video_encoder_factory = std::move(videoEncoderFactory);
+ media_deps.video_decoder_factory = std::move(videoDecoderFactory);
+ if (audioProcessingModule) {
+ media_deps.audio_processing = std::move(audioProcessingModule);
+ } else {
+ media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
+ }
+ media_deps.trials = dependencies.trials.get();
+ dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ dependencies.call_factory = webrtc::CreateCallFactory();
+ dependencies.event_log_factory =
+ std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
+ dependencies.network_controller_factory = std::move(networkControllerFactory);
+ _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
+ NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
+ }
+ return self;
+}
+
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+ (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints {
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
+ if (constraints) {
+ nativeConstraints = constraints.nativeConstraints;
+ }
+ cricket::AudioOptions options;
+ CopyConstraintsIntoAudioOptions(nativeConstraints.get(), &options);
+
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ _nativeFactory->CreateAudioSource(options);
+ return [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId {
+ RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil];
+ return [self audioTrackWithSource:audioSource trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+ trackId:(NSString *)trackId {
+ return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource {
+ return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self
+ signalingThread:_signalingThread.get()
+ workerThread:_workerThread.get()];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast {
+ return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self
+ signalingThread:_signalingThread.get()
+ workerThread:_workerThread.get()
+ isScreenCast:forScreenCast];
+}
+
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId {
+ return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId];
+}
+
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId {
+ return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ delegate:
+ (nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self
+ configuration:configuration
+ constraints:constraints
+ certificateVerifier:nil
+ delegate:delegate];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ certificateVerifier:
+ (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier
+ delegate:
+ (nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self
+ configuration:configuration
+ constraints:constraints
+ certificateVerifier:certificateVerifier
+ delegate:delegate];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCPeerConnection) *)
+ peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+ constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+ dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
+ delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+ return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self
+ configuration:configuration
+ constraints:constraints
+ dependencies:std::move(dependencies)
+ delegate:delegate];
+}
+
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options {
+ RTC_DCHECK(options != nil);
+ _nativeFactory->SetOptions(options.nativeOptions);
+}
+
+- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
+ maxSizeInBytes:(int64_t)maxSizeInBytes {
+ RTC_DCHECK(filePath.length);
+ RTC_DCHECK_GT(maxSizeInBytes, 0);
+
+ if (_hasStartedAecDump) {
+ RTCLogError(@"Aec dump already started.");
+ return NO;
+ }
+ FILE *f = fopen(filePath.UTF8String, "wb");
+ if (!f) {
+ RTCLogError(@"Error opening file: %@. Error: %s", filePath, strerror(errno));
+ return NO;
+ }
+ _hasStartedAecDump = _nativeFactory->StartAecDump(f, maxSizeInBytes);
+ return _hasStartedAecDump;
+}
+
+- (void)stopAecDump {
+ _nativeFactory->StopAecDump();
+ _hasStartedAecDump = NO;
+}
+
+- (rtc::Thread *)signalingThread {
+ return _signalingThread.get();
+}
+
+- (rtc::Thread *)workerThread {
+ return _workerThread.get();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
new file mode 100644
index 0000000000..070a0e74a5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryBuilder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCPeerConnectionFactoryBuilder (DefaultComponents)
+
++ (RTCPeerConnectionFactoryBuilder *)defaultBuilder;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
new file mode 100644
index 0000000000..522e520e12
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory+Native.h"
+#import "RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#import "components/video_codec/RTCVideoDecoderFactoryH264.h"
+#import "components/video_codec/RTCVideoEncoderFactoryH264.h"
+#include "sdk/objc/native/api/video_decoder_factory.h"
+#include "sdk/objc/native/api/video_encoder_factory.h"
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents)
+
++ (RTCPeerConnectionFactoryBuilder *)defaultBuilder {
+ RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
+ auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory();
+ [builder setAudioEncoderFactory:audioEncoderFactory];
+
+ auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory();
+ [builder setAudioDecoderFactory:audioDecoderFactory];
+
+ auto videoEncoderFactory = webrtc::ObjCToNativeVideoEncoderFactory(
+ [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]);
+ [builder setVideoEncoderFactory:std::move(videoEncoderFactory)];
+
+ auto videoDecoderFactory = webrtc::ObjCToNativeVideoDecoderFactory(
+ [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]);
+ [builder setVideoDecoderFactory:std::move(videoDecoderFactory)];
+
+#if defined(WEBRTC_IOS)
+ [builder setAudioDeviceModule:webrtc::CreateAudioDeviceModule()];
+#endif
+ return builder;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
new file mode 100644
index 0000000000..f0b0de156a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactory.h"
+
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class AudioEncoderFactory;
+class AudioDecoderFactory;
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+class AudioProcessing;
+
+} // namespace webrtc
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCPeerConnectionFactoryBuilder : NSObject
+
++ (RTCPeerConnectionFactoryBuilder *)builder;
+
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory;
+
+- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory;
+
+- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory;
+
+- (void)setAudioEncoderFactory:(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory;
+
+- (void)setAudioDecoderFactory:(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory;
+
+- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule;
+
+- (void)setAudioProcessingModule:(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
new file mode 100644
index 0000000000..627909a0e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryBuilder.h"
+#import "RTCPeerConnectionFactory+Native.h"
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+@implementation RTCPeerConnectionFactoryBuilder {
+ std::unique_ptr<webrtc::VideoEncoderFactory> _videoEncoderFactory;
+ std::unique_ptr<webrtc::VideoDecoderFactory> _videoDecoderFactory;
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> _audioEncoderFactory;
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
+ rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
+}
+
++ (RTCPeerConnectionFactoryBuilder *)builder {
+ return [[RTCPeerConnectionFactoryBuilder alloc] init];
+}
+
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc];
+ return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory
+ nativeAudioDecoderFactory:_audioDecoderFactory
+ nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
+ nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
+ audioDeviceModule:_audioDeviceModule.get()
+ audioProcessingModule:_audioProcessingModule];
+}
+
+- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
+ _videoEncoderFactory = std::move(videoEncoderFactory);
+}
+
+- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory {
+ _videoDecoderFactory = std::move(videoDecoderFactory);
+}
+
+- (void)setAudioEncoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory {
+ _audioEncoderFactory = audioEncoderFactory;
+}
+
+- (void)setAudioDecoderFactory:
+ (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory {
+ _audioDecoderFactory = audioDecoderFactory;
+}
+
+- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
+ _audioDeviceModule = audioDeviceModule;
+}
+
+- (void)setAudioProcessingModule:
+ (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
+ _audioProcessingModule = audioProcessingModule;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
new file mode 100644
index 0000000000..8832b23695
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryOptions.h"
+
+#include "api/peer_connection_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
+()
+
+ /** Returns the equivalent native PeerConnectionFactoryInterface::Options
+ * structure. */
+ @property(nonatomic, readonly) webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
new file mode 100644
index 0000000000..bfc54a5d7b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) : NSObject
+
+@property(nonatomic, assign) BOOL disableEncryption;
+
+@property(nonatomic, assign) BOOL disableNetworkMonitor;
+
+@property(nonatomic, assign) BOOL ignoreLoopbackNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreVPNNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreCellularNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreWiFiNetworkAdapter;
+
+@property(nonatomic, assign) BOOL ignoreEthernetNetworkAdapter;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
new file mode 100644
index 0000000000..5467bd5fc9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCPeerConnectionFactoryOptions+Private.h"
+
+#include "rtc_base/network_constants.h"
+
+namespace {
+
+void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options,
+ rtc::AdapterType type,
+ bool ignore) {
+ if (ignore) {
+ options->network_ignore_mask |= type;
+ } else {
+ options->network_ignore_mask &= ~type;
+ }
+}
+} // namespace
+
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
+
+@synthesize disableEncryption = _disableEncryption;
+@synthesize disableNetworkMonitor = _disableNetworkMonitor;
+@synthesize ignoreLoopbackNetworkAdapter = _ignoreLoopbackNetworkAdapter;
+@synthesize ignoreVPNNetworkAdapter = _ignoreVPNNetworkAdapter;
+@synthesize ignoreCellularNetworkAdapter = _ignoreCellularNetworkAdapter;
+@synthesize ignoreWiFiNetworkAdapter = _ignoreWiFiNetworkAdapter;
+@synthesize ignoreEthernetNetworkAdapter = _ignoreEthernetNetworkAdapter;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (webrtc::PeerConnectionFactoryInterface::Options)nativeOptions {
+ webrtc::PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = self.disableEncryption;
+ options.disable_network_monitor = self.disableNetworkMonitor;
+
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_LOOPBACK, self.ignoreLoopbackNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_VPN, self.ignoreVPNNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_CELLULAR, self.ignoreCellularNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_WIFI, self.ignoreWiFiNetworkAdapter);
+ setNetworkBit(&options, rtc::ADAPTER_TYPE_ETHERNET, self.ignoreEthernetNetworkAdapter);
+
+ return options;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
new file mode 100644
index 0000000000..c4d196cf79
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtcpParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtcpParameters)
+()
+
+ /** Returns the equivalent native RtcpParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
+
+/** Initialize the object with a native RtcpParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h
new file mode 100644
index 0000000000..2f7aad3aef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtcpParameters) : NSObject
+
+/** The Canonical Name used by RTCP. */
+@property(nonatomic, readonly, copy) NSString *cname;
+
+/** Whether reduced size RTCP is configured or compound RTCP. */
+@property(nonatomic, assign) BOOL isReducedSize;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
new file mode 100644
index 0000000000..e92ee4b3e7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtcpParameters+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtcpParameters)
+
+@synthesize cname = _cname;
+@synthesize isReducedSize = _isReducedSize;
+
+- (instancetype)init {
+ webrtc::RtcpParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters {
+ if (self = [super init]) {
+ _cname = [NSString stringForStdString:nativeParameters.cname];
+ _isReducedSize = nativeParameters.reduced_size;
+ }
+ return self;
+}
+
+- (webrtc::RtcpParameters)nativeParameters {
+ webrtc::RtcpParameters parameters;
+ parameters.cname = [NSString stdStringForString:_cname];
+ parameters.reduced_size = _isReducedSize;
+ return parameters;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
new file mode 100644
index 0000000000..ff23cfd642
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters)
+()
+
+ /** Returns the equivalent native RtpCodecParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
+
+/** Initialize the object with a native RtpCodecParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
new file mode 100644
index 0000000000..6135223720
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN const NSString *const kRTCRtxCodecName;
+RTC_EXTERN const NSString *const kRTCRedCodecName;
+RTC_EXTERN const NSString *const kRTCUlpfecCodecName;
+RTC_EXTERN const NSString *const kRTCFlexfecCodecName;
+RTC_EXTERN const NSString *const kRTCOpusCodecName;
+RTC_EXTERN const NSString *const kRTCIsacCodecName;
+RTC_EXTERN const NSString *const kRTCL16CodecName;
+RTC_EXTERN const NSString *const kRTCG722CodecName;
+RTC_EXTERN const NSString *const kRTCIlbcCodecName;
+RTC_EXTERN const NSString *const kRTCPcmuCodecName;
+RTC_EXTERN const NSString *const kRTCPcmaCodecName;
+RTC_EXTERN const NSString *const kRTCDtmfCodecName;
+RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName;
+RTC_EXTERN const NSString *const kRTCVp8CodecName;
+RTC_EXTERN const NSString *const kRTCVp9CodecName;
+RTC_EXTERN const NSString *const kRTCH264CodecName;
+
+/** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject
+
+/** The RTP payload type. */
+@property(nonatomic, assign) int payloadType;
+
+/**
+ * The codec MIME subtype. Valid types are listed in:
+ * http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-2
+ *
+ * Several supported types are represented by the constants above.
+ */
+@property(nonatomic, readonly, nonnull) NSString *name;
+
+/**
+ * The media type of this codec. Equivalent to MIME top-level type.
+ *
+ * Valid values are kRTCMediaStreamTrackKindAudio and
+ * kRTCMediaStreamTrackKindVideo.
+ */
+@property(nonatomic, readonly, nonnull) NSString *kind;
+
+/** The codec clock rate expressed in Hertz. */
+@property(nonatomic, readonly, nullable) NSNumber *clockRate;
+
+/**
+ * The number of channels (mono=1, stereo=2).
+ * Set to null for video codecs.
+ **/
+@property(nonatomic, readonly, nullable) NSNumber *numChannels;
+
+/** The "format specific parameters" field from the "a=fmtp" line in the SDP */
+@property(nonatomic, readonly, nonnull) NSDictionary *parameters;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
new file mode 100644
index 0000000000..753667b635
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters+Private.h"
+
+#import "RTCMediaStreamTrack.h"
+#import "helpers/NSString+StdString.h"
+
+#include "media/base/media_constants.h"
+#include "rtc_base/checks.h"
+
+const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
+const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
+const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
+const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
+const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
+const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
+const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
+const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
+const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
+const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
+const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
+const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
+const NSString * const kRTCComfortNoiseCodecName =
+ @(cricket::kComfortNoiseCodecName);
+const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName);
+const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
+const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
+
+@implementation RTC_OBJC_TYPE (RTCRtpCodecParameters)
+
+@synthesize payloadType = _payloadType;
+@synthesize name = _name;
+@synthesize kind = _kind;
+@synthesize clockRate = _clockRate;
+@synthesize numChannels = _numChannels;
+@synthesize parameters = _parameters;
+
+- (instancetype)init {
+ webrtc::RtpCodecParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpCodecParameters &)nativeParameters {
+ if (self = [super init]) {
+ _payloadType = nativeParameters.payload_type;
+ _name = [NSString stringForStdString:nativeParameters.name];
+ switch (nativeParameters.kind) {
+ case cricket::MEDIA_TYPE_AUDIO:
+ _kind = kRTCMediaStreamTrackKindAudio;
+ break;
+ case cricket::MEDIA_TYPE_VIDEO:
+ _kind = kRTCMediaStreamTrackKindVideo;
+ break;
+ case cricket::MEDIA_TYPE_DATA:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ case cricket::MEDIA_TYPE_UNSUPPORTED:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ }
+ if (nativeParameters.clock_rate) {
+ _clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
+ }
+ if (nativeParameters.num_channels) {
+ _numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
+ }
+ NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
+ for (const auto &parameter : nativeParameters.parameters) {
+ [parameters setObject:[NSString stringForStdString:parameter.second]
+ forKey:[NSString stringForStdString:parameter.first]];
+ }
+ _parameters = parameters;
+ }
+ return self;
+}
+
+- (webrtc::RtpCodecParameters)nativeParameters {
+ webrtc::RtpCodecParameters parameters;
+ parameters.payload_type = _payloadType;
+ parameters.name = [NSString stdStringForString:_name];
+ // NSString pointer comparison is safe here since "kind" is readonly and only
+ // populated above.
+ if (_kind == kRTCMediaStreamTrackKindAudio) {
+ parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+ } else if (_kind == kRTCMediaStreamTrackKindVideo) {
+ parameters.kind = cricket::MEDIA_TYPE_VIDEO;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+ if (_clockRate != nil) {
+ parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
+ }
+ if (_numChannels != nil) {
+ parameters.num_channels = absl::optional<int>(_numChannels.intValue);
+ }
+ for (NSString *paramKey in _parameters.allKeys) {
+ std::string key = [NSString stdStringForString:paramKey];
+ std::string value = [NSString stdStringForString:_parameters[paramKey]];
+ parameters.parameters[key] = value;
+ }
+ return parameters;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
new file mode 100644
index 0000000000..d12ca624e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters)
+()
+
+ /** Returns the equivalent native RtpEncodingParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
+
+/** Initialize the object with a native RtpEncodingParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
new file mode 100644
index 0000000000..07f6b7a39c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Corresponds to webrtc::Priority. */
+typedef NS_ENUM(NSInteger, RTCPriority) {
+ RTCPriorityVeryLow,
+ RTCPriorityLow,
+ RTCPriorityMedium,
+ RTCPriorityHigh
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) : NSObject
+
+/** The idenfifier for the encoding layer. This is used in simulcast. */
+@property(nonatomic, copy, nullable) NSString *rid;
+
+/** Controls whether the encoding is currently transmitted. */
+@property(nonatomic, assign) BOOL isActive;
+
+/** The maximum bitrate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *maxBitrateBps;
+
+/** The minimum bitrate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *minBitrateBps;
+
+/** The maximum framerate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *maxFramerate;
+
+/** The requested number of temporal layers to use for the encoding, or nil
+ * if the default should be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *numTemporalLayers;
+
+/** Scale the width and height down by this factor for video. If nil,
+ * implementation default scaling factor will be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *scaleResolutionDownBy;
+
+/** The SSRC being used by this encoding. */
+@property(nonatomic, readonly, nullable) NSNumber *ssrc;
+
+/** The relative bitrate priority. */
+@property(nonatomic, assign) double bitratePriority;
+
+/** The relative DiffServ Code Point priority. */
+@property(nonatomic, assign) RTCPriority networkPriority;
+
+/** Allow dynamic frame length changes for audio:
+ https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */
+@property(nonatomic, assign) BOOL adaptiveAudioPacketTime;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
new file mode 100644
index 0000000000..d6087dafb0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters)
+
+@synthesize rid = _rid;
+@synthesize isActive = _isActive;
+@synthesize maxBitrateBps = _maxBitrateBps;
+@synthesize minBitrateBps = _minBitrateBps;
+@synthesize maxFramerate = _maxFramerate;
+@synthesize numTemporalLayers = _numTemporalLayers;
+@synthesize scaleResolutionDownBy = _scaleResolutionDownBy;
+@synthesize ssrc = _ssrc;
+@synthesize bitratePriority = _bitratePriority;
+@synthesize networkPriority = _networkPriority;
+@synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime;
+
+- (instancetype)init {
+ webrtc::RtpEncodingParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpEncodingParameters &)nativeParameters {
+ if (self = [super init]) {
+ if (!nativeParameters.rid.empty()) {
+ _rid = [NSString stringForStdString:nativeParameters.rid];
+ }
+ _isActive = nativeParameters.active;
+ if (nativeParameters.max_bitrate_bps) {
+ _maxBitrateBps =
+ [NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
+ }
+ if (nativeParameters.min_bitrate_bps) {
+ _minBitrateBps =
+ [NSNumber numberWithInt:*nativeParameters.min_bitrate_bps];
+ }
+ if (nativeParameters.max_framerate) {
+ _maxFramerate = [NSNumber numberWithInt:*nativeParameters.max_framerate];
+ }
+ if (nativeParameters.num_temporal_layers) {
+ _numTemporalLayers = [NSNumber numberWithInt:*nativeParameters.num_temporal_layers];
+ }
+ if (nativeParameters.scale_resolution_down_by) {
+ _scaleResolutionDownBy =
+ [NSNumber numberWithDouble:*nativeParameters.scale_resolution_down_by];
+ }
+ if (nativeParameters.ssrc) {
+ _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
+ }
+ _bitratePriority = nativeParameters.bitrate_priority;
+ _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters)
+ priorityFromNativePriority:nativeParameters.network_priority];
+ _adaptiveAudioPacketTime = nativeParameters.adaptive_ptime;
+ }
+ return self;
+}
+
+- (webrtc::RtpEncodingParameters)nativeParameters {
+ webrtc::RtpEncodingParameters parameters;
+ if (_rid != nil) {
+ parameters.rid = [NSString stdStringForString:_rid];
+ }
+ parameters.active = _isActive;
+ if (_maxBitrateBps != nil) {
+ parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
+ }
+ if (_minBitrateBps != nil) {
+ parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
+ }
+ if (_maxFramerate != nil) {
+ parameters.max_framerate = absl::optional<int>(_maxFramerate.intValue);
+ }
+ if (_numTemporalLayers != nil) {
+ parameters.num_temporal_layers = absl::optional<int>(_numTemporalLayers.intValue);
+ }
+ if (_scaleResolutionDownBy != nil) {
+ parameters.scale_resolution_down_by =
+ absl::optional<double>(_scaleResolutionDownBy.doubleValue);
+ }
+ if (_ssrc != nil) {
+ parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
+ }
+ parameters.bitrate_priority = _bitratePriority;
+ parameters.network_priority =
+ [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority];
+ parameters.adaptive_ptime = _adaptiveAudioPacketTime;
+ return parameters;
+}
+
++ (webrtc::Priority)nativePriorityFromPriority:(RTCPriority)networkPriority {
+ switch (networkPriority) {
+ case RTCPriorityVeryLow:
+ return webrtc::Priority::kVeryLow;
+ case RTCPriorityLow:
+ return webrtc::Priority::kLow;
+ case RTCPriorityMedium:
+ return webrtc::Priority::kMedium;
+ case RTCPriorityHigh:
+ return webrtc::Priority::kHigh;
+ }
+}
+
++ (RTCPriority)priorityFromNativePriority:(webrtc::Priority)nativePriority {
+ switch (nativePriority) {
+ case webrtc::Priority::kVeryLow:
+ return RTCPriorityVeryLow;
+ case webrtc::Priority::kLow:
+ return RTCPriorityLow;
+ case webrtc::Priority::kMedium:
+ return RTCPriorityMedium;
+ case webrtc::Priority::kHigh:
+ return RTCPriorityHigh;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
new file mode 100644
index 0000000000..0e0fbba5ac
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpHeaderExtension.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension)
+()
+
+ /** Returns the equivalent native RtpExtension structure. */
+ @property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
+
+/** Initialize the object with a native RtpExtension structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
new file mode 100644
index 0000000000..4000bf5372
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) : NSObject
+
+/** The URI of the RTP header extension, as defined in RFC5285. */
+@property(nonatomic, readonly, copy) NSString *uri;
+
+/** The value put in the RTP packet to identify the header extension. */
+@property(nonatomic, readonly) int id;
+
+/** Whether the header extension is encrypted or not. */
+@property(nonatomic, readonly, getter=isEncrypted) BOOL encrypted;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
new file mode 100644
index 0000000000..68093e92ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpHeaderExtension+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpHeaderExtension)
+
+@synthesize uri = _uri;
+@synthesize id = _id;
+@synthesize encrypted = _encrypted;
+
+- (instancetype)init {
+ webrtc::RtpExtension nativeExtension;
+ return [self initWithNativeParameters:nativeExtension];
+}
+
+- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters {
+ if (self = [super init]) {
+ _uri = [NSString stringForStdString:nativeParameters.uri];
+ _id = nativeParameters.id;
+ _encrypted = nativeParameters.encrypt;
+ }
+ return self;
+}
+
+- (webrtc::RtpExtension)nativeParameters {
+ webrtc::RtpExtension extension;
+ extension.uri = [NSString stdStringForString:_uri];
+ extension.id = _id;
+ extension.encrypt = _encrypted;
+ return extension;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
new file mode 100644
index 0000000000..139617f727
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters.h"
+
+#include "api/rtp_parameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCRtpParameters)
+()
+
+ /** Returns the equivalent native RtpParameters structure. */
+ @property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
+
+/** Initialize the object with a native RtpParameters structure. */
+- (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h
new file mode 100644
index 0000000000..3d71c55ab9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCRtcpParameters.h"
+#import "RTCRtpCodecParameters.h"
+#import "RTCRtpEncodingParameters.h"
+#import "RTCRtpHeaderExtension.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Corresponds to webrtc::DegradationPreference. */
+typedef NS_ENUM(NSInteger, RTCDegradationPreference) {
+ RTCDegradationPreferenceDisabled,
+ RTCDegradationPreferenceMaintainFramerate,
+ RTCDegradationPreferenceMaintainResolution,
+ RTCDegradationPreferenceBalanced
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpParameters) : NSObject
+
+/** A unique identifier for the last set of parameters applied. */
+@property(nonatomic, copy) NSString *transactionId;
+
+/** Parameters used for RTCP. */
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCRtcpParameters) * rtcp;
+
+/** An array containing parameters for RTP header extensions. */
+@property(nonatomic, readonly, copy)
+ NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtension) *> *headerExtensions;
+
+/** The currently active encodings in the order of preference. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *encodings;
+
+/** The negotiated set of send codecs in order of preference. */
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpCodecParameters) *> *codecs;
+
+/**
+ * Degradation preference in case of CPU adaptation or constrained bandwidth.
+ * If nil, implementation default degradation preference will be used.
+ */
+@property(nonatomic, copy, nullable) NSNumber *degradationPreference;
+
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm
new file mode 100644
index 0000000000..2baf0ecd80
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpParameters.mm
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters+Private.h"
+
+#import "RTCRtcpParameters+Private.h"
+#import "RTCRtpCodecParameters+Private.h"
+#import "RTCRtpEncodingParameters+Private.h"
+#import "RTCRtpHeaderExtension+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpParameters)
+
+@synthesize transactionId = _transactionId;
+@synthesize rtcp = _rtcp;
+@synthesize headerExtensions = _headerExtensions;
+@synthesize encodings = _encodings;
+@synthesize codecs = _codecs;
+@synthesize degradationPreference = _degradationPreference;
+
+- (instancetype)init {
+ webrtc::RtpParameters nativeParameters;
+ return [self initWithNativeParameters:nativeParameters];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpParameters &)nativeParameters {
+ if (self = [super init]) {
+ _transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
+ _rtcp =
+ [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp];
+
+ NSMutableArray *headerExtensions = [[NSMutableArray alloc] init];
+ for (const auto &headerExtension : nativeParameters.header_extensions) {
+ [headerExtensions addObject:[[RTC_OBJC_TYPE(RTCRtpHeaderExtension) alloc]
+ initWithNativeParameters:headerExtension]];
+ }
+ _headerExtensions = headerExtensions;
+
+ NSMutableArray *encodings = [[NSMutableArray alloc] init];
+ for (const auto &encoding : nativeParameters.encodings) {
+ [encodings addObject:[[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc]
+ initWithNativeParameters:encoding]];
+ }
+ _encodings = encodings;
+
+ NSMutableArray *codecs = [[NSMutableArray alloc] init];
+ for (const auto &codec : nativeParameters.codecs) {
+ [codecs
+ addObject:[[RTC_OBJC_TYPE(RTCRtpCodecParameters) alloc] initWithNativeParameters:codec]];
+ }
+ _codecs = codecs;
+
+ _degradationPreference = [RTC_OBJC_TYPE(RTCRtpParameters)
+ degradationPreferenceFromNativeDegradationPreference:nativeParameters
+ .degradation_preference];
+ }
+ return self;
+}
+
+- (webrtc::RtpParameters)nativeParameters {
+ webrtc::RtpParameters parameters;
+ parameters.transaction_id = [NSString stdStringForString:_transactionId];
+ parameters.rtcp = [_rtcp nativeParameters];
+ for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) * headerExtension in _headerExtensions) {
+ parameters.header_extensions.push_back(headerExtension.nativeParameters);
+ }
+ for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in _encodings) {
+ parameters.encodings.push_back(encoding.nativeParameters);
+ }
+ for (RTC_OBJC_TYPE(RTCRtpCodecParameters) * codec in _codecs) {
+ parameters.codecs.push_back(codec.nativeParameters);
+ }
+ if (_degradationPreference) {
+ parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters)
+ nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference)
+ _degradationPreference.intValue];
+ }
+ return parameters;
+}
+
++ (webrtc::DegradationPreference)nativeDegradationPreferenceFromDegradationPreference:
+ (RTCDegradationPreference)degradationPreference {
+ switch (degradationPreference) {
+ case RTCDegradationPreferenceDisabled:
+ return webrtc::DegradationPreference::DISABLED;
+ case RTCDegradationPreferenceMaintainFramerate:
+ return webrtc::DegradationPreference::MAINTAIN_FRAMERATE;
+ case RTCDegradationPreferenceMaintainResolution:
+ return webrtc::DegradationPreference::MAINTAIN_RESOLUTION;
+ case RTCDegradationPreferenceBalanced:
+ return webrtc::DegradationPreference::BALANCED;
+ }
+}
+
++ (NSNumber *)degradationPreferenceFromNativeDegradationPreference:
+ (absl::optional<webrtc::DegradationPreference>)nativeDegradationPreference {
+ if (!nativeDegradationPreference.has_value()) {
+ return nil;
+ }
+
+ switch (*nativeDegradationPreference) {
+ case webrtc::DegradationPreference::DISABLED:
+ return @(RTCDegradationPreferenceDisabled);
+ case webrtc::DegradationPreference::MAINTAIN_FRAMERATE:
+ return @(RTCDegradationPreferenceMaintainFramerate);
+ case webrtc::DegradationPreference::MAINTAIN_RESOLUTION:
+ return @(RTCDegradationPreferenceMaintainResolution);
+ case webrtc::DegradationPreference::BALANCED:
+ return @(RTCDegradationPreferenceBalanced);
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
new file mode 100644
index 0000000000..c15ce70079
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver.h"
+
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
+
+ /** Sets a user defined frame decryptor that will decrypt the entire frame.
+ * This will decrypt the entire frame using the user provided decryption
+ * mechanism regardless of whether SRTP is enabled or not.
+ */
+ - (void)setFrameDecryptor : (rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
new file mode 100644
index 0000000000..6aed0b4bc5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver.h"
+
+#include "api/rtp_receiver_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+namespace webrtc {
+
+class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
+ public:
+ RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver);
+
+ void OnFirstPacketReceived(cricket::MediaType media_type) override;
+
+ private:
+ __weak RTC_OBJC_TYPE(RTCRtpReceiver) * receiver_;
+};
+
+} // namespace webrtc
+
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
+
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
+
+/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpReceiver:(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
+ NS_DESIGNATED_INITIALIZER;
+
++ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;
+
++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType;
+
++ (NSString*)stringForMediaType:(RTCRtpMediaType)mediaType;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h
new file mode 100644
index 0000000000..1e407fd71b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+#import "RTCRtpParameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Represents the media type of the RtpReceiver. */
+typedef NS_ENUM(NSInteger, RTCRtpMediaType) {
+ RTCRtpMediaTypeAudio,
+ RTCRtpMediaTypeVideo,
+ RTCRtpMediaTypeData,
+ RTCRtpMediaTypeUnsupported,
+};
+
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiverDelegate)<NSObject>
+
+ /** Called when the first RTP packet is received.
+ *
+ * Note: Currently if there are multiple RtpReceivers of the same media type,
+ * they will all call OnFirstPacketReceived at once.
+ *
+ * For example, if we create three audio receivers, A/B/C, they will listen to
+ * the same signal from the underneath network layer. Whenever the first audio packet
+ * is received, the underneath signal will be fired. All the receivers A/B/C will be
+ * notified and the callback of the receiver's delegate will be called.
+ *
+ * The process is the same for video receivers.
+ */
+ - (void)rtpReceiver
+ : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType
+ : (RTCRtpMediaType)mediaType;
+
+@end
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiver)<NSObject>
+
+ /** A unique identifier for this receiver. */
+ @property(nonatomic, readonly) NSString *receiverId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ *
+ * The WebRTC specification only defines RTCRtpParameters in terms of senders,
+ * but this API also applies them to receivers, similar to ORTC:
+ * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
+
+/** The RTCMediaStreamTrack associated with the receiver.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
+
+/** The delegate for this RtpReceiver. */
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCRtpReceiverDelegate)> delegate;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpReceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpReceiver)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
new file mode 100644
index 0000000000..60af86ac1b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver+Private.h"
+
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpReceiver+Native.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "api/media_stream_interface.h"
+
+namespace webrtc {
+
+RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver) {
+ RTC_CHECK(receiver);
+ receiver_ = receiver;
+}
+
+void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
+ cricket::MediaType media_type) {
+ RTCRtpMediaType packet_media_type =
+ [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type];
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_;
+ [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
+}
+
+} // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCRtpReceiver) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
+ std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
+}
+
+@synthesize delegate = _delegate;
+
+- (NSString *)receiverId {
+ return [NSString stringForStdString:_nativeRtpReceiver->id()];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
+ initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
+}
+
+- (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpReceiver->track());
+ if (nativeTrack) {
+ return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+ factory:_factory];
+ }
+ return nil;
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpReceiver) {\n receiverId: %@\n}", self.receiverId];
+}
+
+- (void)dealloc {
+ if (_nativeRtpReceiver) {
+ _nativeRtpReceiver->SetObserver(nullptr);
+ }
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = (RTC_OBJC_TYPE(RTCRtpReceiver) *)object;
+ return _nativeRtpReceiver == receiver.nativeRtpReceiver;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpReceiver.get();
+}
+
+#pragma mark - Native
+
+- (void)setFrameDecryptor:(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor {
+ _nativeRtpReceiver->SetFrameDecryptor(frameDecryptor);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ return _nativeRtpReceiver;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpReceiver:
+ (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeRtpReceiver = nativeRtpReceiver;
+ RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description);
+ _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
+ _nativeRtpReceiver->SetObserver(_observer.get());
+ }
+ return self;
+}
+
++ (RTCRtpMediaType)mediaTypeForNativeMediaType:
+ (cricket::MediaType)nativeMediaType {
+ switch (nativeMediaType) {
+ case cricket::MEDIA_TYPE_AUDIO:
+ return RTCRtpMediaTypeAudio;
+ case cricket::MEDIA_TYPE_VIDEO:
+ return RTCRtpMediaTypeVideo;
+ case cricket::MEDIA_TYPE_DATA:
+ return RTCRtpMediaTypeData;
+ case cricket::MEDIA_TYPE_UNSUPPORTED:
+ return RTCRtpMediaTypeUnsupported;
+ }
+}
+
++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType {
+ switch (mediaType) {
+ case RTCRtpMediaTypeAudio:
+ return cricket::MEDIA_TYPE_AUDIO;
+ case RTCRtpMediaTypeVideo:
+ return cricket::MEDIA_TYPE_VIDEO;
+ case RTCRtpMediaTypeData:
+ return cricket::MEDIA_TYPE_DATA;
+ case RTCRtpMediaTypeUnsupported:
+ return cricket::MEDIA_TYPE_UNSUPPORTED;
+ }
+}
+
++ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType {
+ switch (mediaType) {
+ case RTCRtpMediaTypeAudio:
+ return @"AUDIO";
+ case RTCRtpMediaTypeVideo:
+ return @"VIDEO";
+ case RTCRtpMediaTypeData:
+ return @"DATA";
+ case RTCRtpMediaTypeUnsupported:
+ return @"UNSUPPORTED";
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
new file mode 100644
index 0000000000..249d5c5e09
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender.h"
+
+#include "api/crypto/frame_encryptor_interface.h"
+#include "api/scoped_refptr.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * This class extension exposes methods that work directly with injectable C++ components.
+ */
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
+
+ /** Sets a defined frame encryptor that will encrypt the entire frame
+ * before it is sent across the network. This will encrypt the entire frame
+ * using the user provided encryption mechanism regardless of whether SRTP is
+ * enabled or not.
+ */
+ - (void)setFrameEncryptor : (rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
new file mode 100644
index 0000000000..6fdb42bb22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender.h"
+
+#include "api/rtp_sender_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
+
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
+
+/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h
new file mode 100644
index 0000000000..fcdf199869
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCDtmfSender.h"
+#import "RTCMacros.h"
+#import "RTCMediaStreamTrack.h"
+#import "RTCRtpParameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpSender)<NSObject>
+
+/** A unique identifier for this sender. */
+@property(nonatomic, readonly) NSString *senderId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ */
+@property(nonatomic, copy) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
+
+/** The RTCMediaStreamTrack associated with the sender.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, copy, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
+
+/** IDs of streams associated with the RTP sender */
+@property(nonatomic, copy) NSArray<NSString *> *streamIds;
+
+/** The RTCDtmfSender accociated with the RTP sender. */
+@property(nonatomic, readonly, nullable) id<RTC_OBJC_TYPE(RTCDtmfSender)> dtmfSender;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpSender) : NSObject <RTC_OBJC_TYPE(RTCRtpSender)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm
new file mode 100644
index 0000000000..4fadb30f49
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpSender.mm
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender+Private.h"
+
+#import "RTCDtmfSender+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpSender+Native.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "api/media_stream_interface.h"
+
+@implementation RTC_OBJC_TYPE (RTCRtpSender) {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
+}
+
+@synthesize dtmfSender = _dtmfSender;
+
+- (NSString *)senderId {
+ return [NSString stringForStdString:_nativeRtpSender->id()];
+}
+
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
+ initWithNativeParameters:_nativeRtpSender->GetParameters()];
+}
+
+- (void)setParameters:(RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+ if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) {
+ RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set parameters: %@", self, parameters);
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpSender->track());
+ if (nativeTrack) {
+ return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+ factory:_factory];
+ }
+ return nil;
+}
+
+- (void)setTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+ if (!_nativeRtpSender->SetTrack(track.nativeTrack.get())) {
+ RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set track %@", self, track);
+ }
+}
+
+- (NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds = _nativeRtpSender->stream_ids();
+ NSMutableArray *streamIds = [NSMutableArray arrayWithCapacity:nativeStreamIds.size()];
+ for (const auto &s : nativeStreamIds) {
+ [streamIds addObject:[NSString stringForStdString:s]];
+ }
+ return streamIds;
+}
+
+- (void)setStreamIds:(NSArray<NSString *> *)streamIds {
+ std::vector<std::string> nativeStreamIds;
+ for (NSString *streamId in streamIds) {
+ nativeStreamIds.push_back([streamId UTF8String]);
+ }
+ _nativeRtpSender->SetStreams(nativeStreamIds);
+}
+
+- (NSString *)description {
+ return [NSString
+ stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpSender) {\n senderId: %@\n}", self.senderId];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTC_OBJC_TYPE(RTCRtpSender) *sender = (RTC_OBJC_TYPE(RTCRtpSender) *)object;
+ return _nativeRtpSender == sender.nativeRtpSender;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpSender.get();
+}
+
+#pragma mark - Native
+
+- (void)setFrameEncryptor:(rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor {
+ _nativeRtpSender->SetFrameEncryptor(frameEncryptor);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ return _nativeRtpSender;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeRtpSender);
+ if (self = [super init]) {
+ _factory = factory;
+ _nativeRtpSender = nativeRtpSender;
+ if (_nativeRtpSender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender(
+ _nativeRtpSender->GetDtmfSender());
+ if (nativeDtmfSender) {
+ _dtmfSender =
+ [[RTC_OBJC_TYPE(RTCDtmfSender) alloc] initWithNativeDtmfSender:nativeDtmfSender];
+ }
+ }
+ RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): created sender: %@", self, self.description);
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
new file mode 100644
index 0000000000..65d45fb88e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpTransceiver.h"
+
+#include "api/rtp_transceiver_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit)
+()
+
+    /** The native webrtc::RtpTransceiverInit built from this object's
+     *  direction, streamIds and sendEncodings. */
+    @property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit;
+
+@end
+
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver)
+()
+
+    /** The wrapped native transceiver; exposed for the SDK's internal use. */
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::RtpTransceiverInterface> nativeRtpTransceiver;
+
+/** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+           nativeRtpTransceiver:
+               (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver
+    NS_DESIGNATED_INITIALIZER;
+
+/** Maps an Objective-C direction enum value to the native equivalent. */
++ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
+    (RTCRtpTransceiverDirection)direction;
+
+/** Maps a native direction value to the Objective-C enum equivalent. */
++ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
+    (webrtc::RtpTransceiverDirection)nativeDirection;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
new file mode 100644
index 0000000000..fd59013639
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCRtpReceiver.h"
+#import "RTCRtpSender.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCRtpTransceiverErrorDomain;
+
+/** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */
+typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) {
+ RTCRtpTransceiverDirectionSendRecv,
+ RTCRtpTransceiverDirectionSendOnly,
+ RTCRtpTransceiverDirectionRecvOnly,
+ RTCRtpTransceiverDirectionInactive,
+ RTCRtpTransceiverDirectionStopped
+};
+
+/** Structure for initializing an RTCRtpTransceiver in a call to
+ * RTCPeerConnection.addTransceiver.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject
+
+/** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */
+@property(nonatomic) RTCRtpTransceiverDirection direction;
+
+/** The added RTCRtpTransceiver will be added to these streams. */
+@property(nonatomic) NSArray<NSString *> *streamIds;
+
+/** TODO(bugs.webrtc.org/7600): Not implemented. */
+@property(nonatomic) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *sendEncodings;
+
+@end
+
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
+
+/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the
+ * WebRTC specification. A transceiver represents a combination of an RTCRtpSender
+ * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an
+ * RTCRtpTransceiver is said to be associated with a media description if its
+ * mid property is non-nil; otherwise, it is said to be disassociated.
+ * JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24
+ *
+ * Note that RTCRtpTransceivers are only supported when using
+ * RTCPeerConnection with Unified Plan SDP.
+ *
+ * WebRTC specification for RTCRtpTransceiver, the JavaScript analog:
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCRtpTransceiver)<NSObject>
+
+ /** Media type of the transceiver. The sender and receiver will also have this
+ * type.
+ */
+ @property(nonatomic, readonly) RTCRtpMediaType mediaType;
+
+/** The mid attribute is the mid negotiated and present in the local and
+ * remote descriptions. Before negotiation is complete, the mid value may be
+ * nil. After rollbacks, the value may change from a non-nil value to nil.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid
+ */
+@property(nonatomic, readonly) NSString *mid;
+
+/** The sender attribute exposes the RTCRtpSender corresponding to the RTP
+ * media that may be sent with the transceiver's mid. The sender is always
+ * present, regardless of the direction of media.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpSender) * sender;
+
+/** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP
+ * media that may be received with the transceiver's mid. The receiver is
+ * always present, regardless of the direction of media.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver
+ */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpReceiver) * receiver;
+
+/** The isStopped attribute indicates that the sender of this transceiver will
+ * no longer send, and that the receiver will no longer receive. It is true if
+ * either stop has been called or if setting the local or remote description
+ * has caused the RTCRtpTransceiver to be stopped.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped
+ */
+@property(nonatomic, readonly) BOOL isStopped;
+
+/** The direction attribute indicates the preferred direction of this
+ * transceiver, which will be used in calls to createOffer and createAnswer.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+ */
+@property(nonatomic, readonly) RTCRtpTransceiverDirection direction;
+
+/** The currentDirection attribute indicates the current direction negotiated
+ * for this transceiver. If this transceiver has never been represented in an
+ * offer/answer exchange, or if the transceiver is stopped, the value is not
+ * present and this method returns NO.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection
+ */
+- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut;
+
+/** The stop method irreversibly stops the RTCRtpTransceiver. The sender of
+ * this transceiver will no longer send, the receiver will no longer receive.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
+ */
+- (void)stopInternal;
+
+/** An update of directionality does not take effect immediately. Instead,
+ * future calls to createOffer and createAnswer mark the corresponding media
+ * descriptions as sendrecv, sendonly, recvonly, or inactive.
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+ */
+- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error;
+
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpTransceiver)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
new file mode 100644
index 0000000000..ae1cf79864
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpTransceiver+Private.h"
+
+#import "RTCRtpEncodingParameters+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+// Error domain used by -[RTCRtpTransceiver setDirection:error:].
+// NOTE(review): the string spells "Tranceiver" (missing an "s"); kept as-is
+// because it is a runtime value clients may match against.
+NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver";
+
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit)
+
+@synthesize direction = _direction;
+@synthesize streamIds = _streamIds;
+@synthesize sendEncodings = _sendEncodings;
+
+- (instancetype)init {
+  if (self = [super init]) {
+    // Default per the W3C spec: transceivers are sendrecv unless configured.
+    _direction = RTCRtpTransceiverDirectionSendRecv;
+  }
+  return self;
+}
+
+// Builds the native init struct from the Objective-C configuration. Called
+// when the transceiver is added to a peer connection.
+- (webrtc::RtpTransceiverInit)nativeInit {
+  webrtc::RtpTransceiverInit init;
+  init.direction =
+      [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:_direction];
+  for (NSString *streamId in _streamIds) {
+    init.stream_ids.push_back([streamId UTF8String]);
+  }
+  for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * sendEncoding in _sendEncodings) {
+    init.send_encodings.push_back(sendEncoding.nativeParameters);
+  }
+  return init;
+}
+
+@end
+
+// Wrapper pairing a native webrtc::RtpTransceiverInterface with its
+// Objective-C sender/receiver wrappers. All accessors forward to the native
+// object.
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiver) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+  rtc::scoped_refptr<webrtc::RtpTransceiverInterface> _nativeRtpTransceiver;
+}
+
+- (RTCRtpMediaType)mediaType {
+  return [RTC_OBJC_TYPE(RTCRtpReceiver)
+      mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()];
+}
+
+// NOTE(review): returns nil before negotiation (native mid() is optional)
+// although the property sits inside NS_ASSUME_NONNULL in the public header —
+// confirm whether it should be annotated nullable there.
+- (NSString *)mid {
+  if (_nativeRtpTransceiver->mid()) {
+    return [NSString stringForStdString:*_nativeRtpTransceiver->mid()];
+  } else {
+    return nil;
+  }
+}
+
+@synthesize sender = _sender;
+@synthesize receiver = _receiver;
+
+- (BOOL)isStopped {
+  return _nativeRtpTransceiver->stopped();
+}
+
+- (RTCRtpTransceiverDirection)direction {
+  return [RTC_OBJC_TYPE(RTCRtpTransceiver)
+      rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()];
+}
+
+// Per Cocoa convention, *error is only written on failure; the native error
+// message is surfaced under the "message" userInfo key.
+- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error {
+  webrtc::RTCError nativeError = _nativeRtpTransceiver->SetDirectionWithError(
+      [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]);
+
+  if (!nativeError.ok() && error) {
+    *error = [NSError errorWithDomain:kRTCRtpTransceiverErrorDomain
+                                 code:static_cast<int>(nativeError.type())
+                             userInfo:@{
+                               @"message" : [NSString stringWithCString:nativeError.message()
+                                                               encoding:NSUTF8StringEncoding]
+                             }];
+  }
+}
+
+// Returns NO (leaving *currentDirectionOut untouched) when the native
+// current_direction() is unset, i.e. never negotiated or stopped.
+- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut {
+  if (_nativeRtpTransceiver->current_direction()) {
+    *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver)
+        rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()];
+    return YES;
+  } else {
+    return NO;
+  }
+}
+
+// Irreversibly stops the native transceiver.
+- (void)stopInternal {
+  _nativeRtpTransceiver->StopInternal();
+}
+
+- (NSString *)description {
+  return [NSString
+      stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpTransceiver) {\n  sender: %@\n  receiver: %@\n}",
+                       _sender,
+                       _receiver];
+}
+
+// Equality is identity of the wrapped native transceiver.
+- (BOOL)isEqual:(id)object {
+  if (self == object) {
+    return YES;
+  }
+  if (object == nil) {
+    return NO;
+  }
+  if (![object isMemberOfClass:[self class]]) {
+    return NO;
+  }
+  RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = (RTC_OBJC_TYPE(RTCRtpTransceiver) *)object;
+  return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver;
+}
+
+// Hash on the raw native pointer, consistent with -isEqual: above.
+- (NSUInteger)hash {
+  return (NSUInteger)_nativeRtpTransceiver.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
+  return _nativeRtpTransceiver;
+}
+
+// Designated initializer. Eagerly wraps the native transceiver's sender and
+// receiver so the corresponding properties are available immediately.
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+           nativeRtpTransceiver:
+               (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
+  NSParameterAssert(factory);
+  NSParameterAssert(nativeRtpTransceiver);
+  if (self = [super init]) {
+    _factory = factory;
+    _nativeRtpTransceiver = nativeRtpTransceiver;
+    _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory
+                                                   nativeRtpSender:nativeRtpTransceiver->sender()];
+    _receiver =
+        [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:_factory
+                                             nativeRtpReceiver:nativeRtpTransceiver->receiver()];
+    RTCLogInfo(
+        @"RTC_OBJC_TYPE(RTCRtpTransceiver)(%p): created transceiver: %@", self, self.description);
+  }
+  return self;
+}
+
+// Exhaustive switch with no default so the compiler warns if a new enum case
+// is added without a mapping.
++ (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection:
+    (RTCRtpTransceiverDirection)direction {
+  switch (direction) {
+    case RTCRtpTransceiverDirectionSendRecv:
+      return webrtc::RtpTransceiverDirection::kSendRecv;
+    case RTCRtpTransceiverDirectionSendOnly:
+      return webrtc::RtpTransceiverDirection::kSendOnly;
+    case RTCRtpTransceiverDirectionRecvOnly:
+      return webrtc::RtpTransceiverDirection::kRecvOnly;
+    case RTCRtpTransceiverDirectionInactive:
+      return webrtc::RtpTransceiverDirection::kInactive;
+    case RTCRtpTransceiverDirectionStopped:
+      return webrtc::RtpTransceiverDirection::kStopped;
+  }
+}
+
+// Inverse of the mapping above; likewise exhaustive.
++ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection:
+    (webrtc::RtpTransceiverDirection)nativeDirection {
+  switch (nativeDirection) {
+    case webrtc::RtpTransceiverDirection::kSendRecv:
+      return RTCRtpTransceiverDirectionSendRecv;
+    case webrtc::RtpTransceiverDirection::kSendOnly:
+      return RTCRtpTransceiverDirectionSendOnly;
+    case webrtc::RtpTransceiverDirection::kRecvOnly:
+      return RTCRtpTransceiverDirectionRecvOnly;
+    case webrtc::RtpTransceiverDirection::kInactive:
+      return RTCRtpTransceiverDirectionInactive;
+    case webrtc::RtpTransceiverDirection::kStopped:
+      return RTCRtpTransceiverDirectionStopped;
+  }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h
new file mode 100644
index 0000000000..f68bc5e9e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Initialize and clean up the SSL library. Failure is fatal. These call the
+ * corresponding functions in webrtc/rtc_base/ssladapter.h.
+ */
+RTC_EXTERN BOOL RTCInitializeSSL(void);
+RTC_EXTERN BOOL RTCCleanupSSL(void);
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm
new file mode 100644
index 0000000000..430249577b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSSLAdapter.mm
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSSLAdapter.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/ssl_adapter.h"
+
+// Initializes the SSL library via rtc::InitializeSSL(). Failure trips an
+// RTC_DCHECK in debug builds; the result is also returned to the caller.
+BOOL RTCInitializeSSL(void) {
+  BOOL initialized = rtc::InitializeSSL();
+  RTC_DCHECK(initialized);
+  return initialized;
+}
+
+// Tears down the SSL library via rtc::CleanupSSL(). Failure trips an
+// RTC_DCHECK in debug builds; the result is also returned to the caller.
+BOOL RTCCleanupSSL(void) {
+  BOOL cleanedUp = rtc::CleanupSSL();
+  RTC_DCHECK(cleanedUp);
+  return cleanedUp;
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
new file mode 100644
index 0000000000..aa087e557f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription.h"
+
+#include "api/jsep.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCSessionDescription)
+()
+
+    /**
+     * The native SessionDescriptionInterface representation of this
+     * RTCSessionDescription object. This is needed to pass to the underlying C++
+     * APIs.
+     */
+    @property(nonatomic,
+              readonly) std::unique_ptr<webrtc::SessionDescriptionInterface> nativeDescription;
+
+/**
+ * Initialize an RTCSessionDescription from a native
+ * SessionDescriptionInterface. No ownership is taken of the native session
+ * description.
+ */
+- (instancetype)initWithNativeDescription:
+    (const webrtc::SessionDescriptionInterface *)nativeDescription;
+
+/** Converts an RTCSdpType to the native SessionDescriptionInterface type
+ *  string constant. */
++ (std::string)stdStringForType:(RTCSdpType)type;
+
+/** Converts a native SDP type string back to its RTCSdpType. */
++ (RTCSdpType)typeForStdString:(const std::string &)string;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h
new file mode 100644
index 0000000000..8a9479d5cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+/**
+ * Represents the session description type. This exposes the same types that are
+ * in C++, which doesn't include the rollback type that is in the W3C spec.
+ */
+typedef NS_ENUM(NSInteger, RTCSdpType) {
+ RTCSdpTypeOffer,
+ RTCSdpTypePrAnswer,
+ RTCSdpTypeAnswer,
+ RTCSdpTypeRollback,
+};
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject
+
+/** The type of session description. */
+@property(nonatomic, readonly) RTCSdpType type;
+
+/** The SDP string representation of this session description. */
+@property(nonatomic, readonly) NSString *sdp;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Initialize a session description with a type and SDP string. */
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER;
+
++ (NSString *)stringForType:(RTCSdpType)type;
+
++ (RTCSdpType)typeForString:(NSString *)string;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm
new file mode 100644
index 0000000000..539c90b14c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCSessionDescription.mm
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCSessionDescription+Private.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+#include "rtc_base/checks.h"
+
+// Immutable value object pairing an SDP type with its SDP string, plus
+// conversions to/from the native webrtc::SessionDescriptionInterface.
+@implementation RTC_OBJC_TYPE (RTCSessionDescription)
+
+@synthesize type = _type;
+@synthesize sdp = _sdp;
+
++ (NSString *)stringForType:(RTCSdpType)type {
+  std::string string = [[self class] stdStringForType:type];
+  return [NSString stringForStdString:string];
+}
+
++ (RTCSdpType)typeForString:(NSString *)string {
+  std::string typeString = string.stdString;
+  return [[self class] typeForStdString:typeString];
+}
+
+// Designated initializer. The SDP string is copied defensively.
+- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
+  if (self = [super init]) {
+    _type = type;
+    _sdp = [sdp copy];
+  }
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCSessionDescription):\n%@\n%@",
+                                    [[self class] stringForType:_type],
+                                    _sdp];
+}
+
+#pragma mark - Private
+
+// Parses the stored SDP string on every call; returns null (after logging the
+// parse error) on failure, so callers must null-check the result.
+- (std::unique_ptr<webrtc::SessionDescriptionInterface>)nativeDescription {
+  webrtc::SdpParseError error;
+
+  std::unique_ptr<webrtc::SessionDescriptionInterface> description(webrtc::CreateSessionDescription(
+      [[self class] stdStringForType:_type], _sdp.stdString, &error));
+
+  if (!description) {
+    RTCLogError(@"Failed to create session description: %s\nline: %s",
+                error.description.c_str(),
+                error.line.c_str());
+  }
+
+  return description;
+}
+
+// Serializes the native description into an SDP string; no ownership of the
+// pointer is taken.
+- (instancetype)initWithNativeDescription:
+    (const webrtc::SessionDescriptionInterface *)nativeDescription {
+  NSParameterAssert(nativeDescription);
+  std::string sdp;
+  nativeDescription->ToString(&sdp);
+  RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];
+
+  return [self initWithType:type
+                        sdp:[NSString stringForStdString:sdp]];
+}
+
+// Exhaustive switch with no default so new RTCSdpType cases trigger warnings.
++ (std::string)stdStringForType:(RTCSdpType)type {
+  switch (type) {
+    case RTCSdpTypeOffer:
+      return webrtc::SessionDescriptionInterface::kOffer;
+    case RTCSdpTypePrAnswer:
+      return webrtc::SessionDescriptionInterface::kPrAnswer;
+    case RTCSdpTypeAnswer:
+      return webrtc::SessionDescriptionInterface::kAnswer;
+    case RTCSdpTypeRollback:
+      return webrtc::SessionDescriptionInterface::kRollback;
+  }
+}
+
+// Unknown strings hit a debug assertion and fall back to RTCSdpTypeOffer.
++ (RTCSdpType)typeForStdString:(const std::string &)string {
+  if (string == webrtc::SessionDescriptionInterface::kOffer) {
+    return RTCSdpTypeOffer;
+  } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
+    return RTCSdpTypePrAnswer;
+  } else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
+    return RTCSdpTypeAnswer;
+  } else if (string == webrtc::SessionDescriptionInterface::kRollback) {
+    return RTCSdpTypeRollback;
+  } else {
+    RTC_DCHECK_NOTREACHED();
+    return RTCSdpTypeOffer;
+  }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
new file mode 100644
index 0000000000..47c5241d51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatisticsReport.h"
+
+#include "api/stats/rtc_stats_report.h"
+
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
+
+/** Initializes a report from a native webrtc::RTCStatsReport, copying its
+ *  timestamp and wrapping each contained stat as an RTCStatistics object. */
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h
new file mode 100644
index 0000000000..06dbf48d88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatistics);
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** A statistics report. Encapsulates a number of RTCStatistics objects. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatisticsReport) : NSObject
+
+/** The timestamp of the report in microseconds since 1970-01-01T00:00:00Z. */
+@property(nonatomic, readonly) CFTimeInterval timestamp_us;
+
+/** RTCStatistics objects by id. */
+@property(nonatomic, readonly) NSDictionary<NSString *, RTC_OBJC_TYPE(RTCStatistics) *> *statistics;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/** A part of a report (a subreport) covering a certain area. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCStatistics) : NSObject
+
+/** The id of this subreport, e.g. "RTCMediaStreamTrack_receiver_2". */
+@property(nonatomic, readonly) NSString *id;
+
+/** The timestamp of the subreport in microseconds since 1970-01-01T00:00:00Z. */
+@property(nonatomic, readonly) CFTimeInterval timestamp_us;
+
+/** The type of the subreport, e.g. "track", "codec". */
+@property(nonatomic, readonly) NSString *type;
+
+/** The keys and values of the subreport, e.g. "totalFramesDuration = 5.551".
+ The values are either NSNumbers or NSStrings or NSArrays encapsulating NSNumbers
+ or NSStrings, or NSDictionary of NSString keys to NSNumber values. */
+@property(nonatomic, readonly) NSDictionary<NSString *, NSObject *> *values;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
new file mode 100644
index 0000000000..28ef326b99
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCStatisticsReport+Private.h"
+
+#include "helpers/NSString+StdString.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+/** Converts a single value to a suitable NSNumber, NSString or NSArray containing NSNumbers
+ or NSStrings, or NSDictionary of NSString keys to NSNumber values.*/
+/** Converts a single value to a suitable NSNumber, NSString or NSArray containing NSNumbers
+    or NSStrings, or NSDictionary of NSString keys to NSNumber values. Returns nil when the
+    member is undefined or of an unrecognized type (the latter also trips a debug assert). */
+NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
+  if (member->is_defined()) {
+    switch (member->type()) {
+      case RTCStatsMemberInterface::kBool:
+        return [NSNumber numberWithBool:*member->cast_to<RTCStatsMember<bool>>()];
+      case RTCStatsMemberInterface::kInt32:
+        return [NSNumber numberWithInt:*member->cast_to<RTCStatsMember<int32_t>>()];
+      case RTCStatsMemberInterface::kUint32:
+        return [NSNumber numberWithUnsignedInt:*member->cast_to<RTCStatsMember<uint32_t>>()];
+      case RTCStatsMemberInterface::kInt64:
+        // numberWithLongLong, not numberWithLong: long is only 32 bits on
+        // ILP32 targets, which would truncate 64-bit stats values.
+        return [NSNumber numberWithLongLong:*member->cast_to<RTCStatsMember<int64_t>>()];
+      case RTCStatsMemberInterface::kUint64:
+        return
+            [NSNumber numberWithUnsignedLongLong:*member->cast_to<RTCStatsMember<uint64_t>>()];
+      case RTCStatsMemberInterface::kDouble:
+        return [NSNumber numberWithDouble:*member->cast_to<RTCStatsMember<double>>()];
+      case RTCStatsMemberInterface::kString:
+        return [NSString stringForStdString:*member->cast_to<RTCStatsMember<std::string>>()];
+      // Sequence types: each element is boxed and collected into an immutable
+      // NSArray.
+      case RTCStatsMemberInterface::kSequenceBool: {
+        std::vector<bool> sequence = *member->cast_to<RTCStatsMember<std::vector<bool>>>();
+        NSMutableArray *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (auto item : sequence) {
+          [array addObject:[NSNumber numberWithBool:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceInt32: {
+        std::vector<int32_t> sequence = *member->cast_to<RTCStatsMember<std::vector<int32_t>>>();
+        NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          [array addObject:[NSNumber numberWithInt:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceUint32: {
+        std::vector<uint32_t> sequence = *member->cast_to<RTCStatsMember<std::vector<uint32_t>>>();
+        NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          [array addObject:[NSNumber numberWithUnsignedInt:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceInt64: {
+        std::vector<int64_t> sequence = *member->cast_to<RTCStatsMember<std::vector<int64_t>>>();
+        NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          // 64-bit boxing; see the kInt64 case above.
+          [array addObject:[NSNumber numberWithLongLong:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceUint64: {
+        std::vector<uint64_t> sequence = *member->cast_to<RTCStatsMember<std::vector<uint64_t>>>();
+        NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          [array addObject:[NSNumber numberWithUnsignedLongLong:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceDouble: {
+        std::vector<double> sequence = *member->cast_to<RTCStatsMember<std::vector<double>>>();
+        NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          [array addObject:[NSNumber numberWithDouble:item]];
+        }
+        return [array copy];
+      }
+      case RTCStatsMemberInterface::kSequenceString: {
+        std::vector<std::string> sequence =
+            *member->cast_to<RTCStatsMember<std::vector<std::string>>>();
+        NSMutableArray<NSString *> *array = [NSMutableArray arrayWithCapacity:sequence.size()];
+        for (const auto &item : sequence) {
+          [array addObject:[NSString stringForStdString:item]];
+        }
+        return [array copy];
+      }
+      // Map types: string keys to boxed numeric values, as an immutable
+      // NSDictionary.
+      case RTCStatsMemberInterface::kMapStringUint64: {
+        std::map<std::string, uint64_t> map =
+            *member->cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>();
+        NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+            [NSMutableDictionary dictionaryWithCapacity:map.size()];
+        for (const auto &item : map) {
+          dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+        }
+        return [dictionary copy];
+      }
+      case RTCStatsMemberInterface::kMapStringDouble: {
+        std::map<std::string, double> map =
+            *member->cast_to<RTCStatsMember<std::map<std::string, double>>>();
+        NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+            [NSMutableDictionary dictionaryWithCapacity:map.size()];
+        for (const auto &item : map) {
+          dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+        }
+        return [dictionary copy];
+      }
+      default:
+        RTC_DCHECK_NOTREACHED();
+    }
+  }
+
+  return nil;
+}
+} // namespace webrtc
+
+// Immutable wrapper for a single native webrtc::RTCStats subreport.
+@implementation RTC_OBJC_TYPE (RTCStatistics)
+
+@synthesize id = _id;
+@synthesize timestamp_us = _timestamp_us;
+@synthesize type = _type;
+@synthesize values = _values;
+
+// Copies the native stat's id, timestamp and type, then boxes every defined
+// member into the values dictionary.
+- (instancetype)initWithStatistics:(const webrtc::RTCStats &)statistics {
+  if (self = [super init]) {
+    _id = [NSString stringForStdString:statistics.id()];
+    _timestamp_us = statistics.timestamp_us();
+    _type = [NSString stringWithCString:statistics.type() encoding:NSUTF8StringEncoding];
+
+    NSMutableDictionary<NSString *, NSObject *> *values = [NSMutableDictionary dictionary];
+    for (const webrtc::RTCStatsMemberInterface *member : statistics.Members()) {
+      // Undefined members yield nil and are skipped.
+      NSObject *value = ValueFromStatsMember(member);
+      if (value) {
+        NSString *name = [NSString stringWithCString:member->name() encoding:NSUTF8StringEncoding];
+        RTC_DCHECK(name.length > 0);
+        RTC_DCHECK(!values[name]);
+        values[name] = value;
+      }
+    }
+    _values = [values copy];
+  }
+
+  return self;
+}
+
+- (NSString *)description {
+  return [NSString stringWithFormat:@"id = %@, type = %@, timestamp = %.0f, values = %@",
+                                    self.id,
+                                    self.type,
+                                    self.timestamp_us,
+                                    self.values];
+}
+
+@end
+
+// Immutable container of RTCStatistics subreports keyed by their ids.
+// Initialization lives in the (Private) category below in this file.
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport)
+
+@synthesize timestamp_us = _timestamp_us;
+@synthesize statistics = _statistics;
+
+- (NSString *)description {
+  return [NSString
+      stringWithFormat:@"timestamp = %.0f, statistics = %@", self.timestamp_us, self.statistics];
+}
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCStatisticsReport) (Private)
+
+// Copies the native report's timestamp and wraps each contained stat as an
+// RTCStatistics object, keyed by the stat's id.
+- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report {
+  if (self = [super init]) {
+    _timestamp_us = report.timestamp_us();
+
+    NSMutableDictionary *statisticsById =
+        [NSMutableDictionary dictionaryWithCapacity:report.size()];
+    for (const auto &stat : report) {
+      RTC_OBJC_TYPE(RTCStatistics) *statistics =
+          [[RTC_OBJC_TYPE(RTCStatistics) alloc] initWithStatistics:stat];
+      statisticsById[statistics.id] = statistics;
+    }
+    _statistics = [statisticsById copy];
+  }
+
+  return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h
new file mode 100644
index 0000000000..5c66e5a63a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_EXTERN void RTCSetupInternalTracer(void);
+/** Starts capture to specified file. Must be a valid writable path.
+ * Returns YES if capture starts.
+ */
+RTC_EXTERN BOOL RTCStartInternalCapture(NSString* filePath);
+RTC_EXTERN void RTCStopInternalCapture(void);
+RTC_EXTERN void RTCShutdownInternalTracer(void);
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm
new file mode 100644
index 0000000000..72f9f4da13
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCTracing.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCTracing.h"
+
+#include "rtc_base/event_tracer.h"
+
+void RTCSetupInternalTracer(void) {  // Thin C shim over the native event tracer.
+  rtc::tracing::SetupInternalTracer();
+}
+
+BOOL RTCStartInternalCapture(NSString *filePath) {  // Starts trace capture to filePath; YES on success.
+  return rtc::tracing::StartInternalCapture(filePath.UTF8String);  // NOTE(review): nil filePath yields a NULL C string — presumably callers pass a valid path; confirm.
+}
+
+void RTCStopInternalCapture(void) {
+  rtc::tracing::StopInternalCapture();
+}
+
+void RTCShutdownInternalTracer(void) {
+  rtc::tracing::ShutdownInternalTracer();
+}
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
new file mode 100644
index 0000000000..5eff996c4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoCodecInfo.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interface for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
+
+ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format;
+- (webrtc::SdpVideoFormat)nativeSdpVideoFormat;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
new file mode 100644
index 0000000000..2eb8d366d2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
+
+ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format {
+ NSMutableDictionary *params = [NSMutableDictionary dictionary];
+ for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) {
+ [params setObject:[NSString stringForStdString:it->second]
+ forKey:[NSString stringForStdString:it->first]];
+ }
+ return [self initWithName:[NSString stringForStdString:format.name] parameters:params];
+}
+
+- (webrtc::SdpVideoFormat)nativeSdpVideoFormat {  // Converts this codec info back to a native SdpVideoFormat.
+  std::map<std::string, std::string> parameters;
+  for (NSString *paramKey in self.parameters.allKeys) {
+    std::string key = [NSString stdStringForString:paramKey];
+    std::string value = [NSString stdStringForString:self.parameters[paramKey]];
+    parameters[key] = value;  // Copies every SDP fmtp parameter into the native map.
+  }
+
+  return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
new file mode 100644
index 0000000000..8323b18dc1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoEncoderSettings.h"
+
+#include "modules/video_coding/include/video_codec_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
+
+ - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *__nullable)videoCodec;
+- (webrtc::VideoCodec)nativeVideoCodec;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
new file mode 100644
index 0000000000..dec3a61090
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderSettings+Private.h"
+
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
+
+ - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec {  // Copies fields from a native codec config; nil/NULL yields default-initialized settings.
+  if (self = [super init]) {
+    if (videoCodec) {
+      const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
+      self.name = [NSString stringWithUTF8String:codecName];
+
+      self.width = videoCodec->width;
+      self.height = videoCodec->height;
+      self.startBitrate = videoCodec->startBitrate;
+      self.maxBitrate = videoCodec->maxBitrate;
+      self.minBitrate = videoCodec->minBitrate;
+      self.maxFramerate = videoCodec->maxFramerate;
+      self.qpMax = videoCodec->qpMax;
+      self.mode = (RTCVideoCodecMode)videoCodec->mode;  // NOTE(review): assumes RTCVideoCodecMode values mirror webrtc::VideoCodecMode — confirm enums stay in sync.
+    }
+  }
+
+  return self;
+}
+
+- (webrtc::VideoCodec)nativeVideoCodec {  // Converts these settings back into a native webrtc::VideoCodec.
+  webrtc::VideoCodec videoCodec;
+  videoCodec.width = self.width;
+  videoCodec.height = self.height;
+  videoCodec.startBitrate = self.startBitrate;
+  videoCodec.maxBitrate = self.maxBitrate;
+  videoCodec.minBitrate = self.minBitrate;
+  videoCodec.maxFramerate = self.maxFramerate;  // Was a duplicate maxBitrate assignment; maxFramerate was silently dropped on round-trip (initWithNativeVideoCodec: reads it).
+  videoCodec.qpMax = self.qpMax;
+  videoCodec.mode = (webrtc::VideoCodecMode)self.mode;
+
+  return videoCodec;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
new file mode 100644
index 0000000000..8e475dd21e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoSource.h"
+
+#import "RTCMediaSource+Private.h"
+
+#include "api/media_stream_interface.h"
+#include "rtc_base/thread.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCVideoSource)
+()
+
+ /**
+ * The VideoTrackSourceInterface object passed to this RTCVideoSource during
+ * construction.
+ */
+ @property(nonatomic,
+ readonly) rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeVideoSource;
+
+/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeVideoSource:
+ (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type NS_UNAVAILABLE;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread
+ isScreenCast:(BOOL)isScreenCast;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h
new file mode 100644
index 0000000000..cdef8b89a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMediaSource.h"
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+
+@interface RTC_OBJC_TYPE (RTCVideoSource) : RTC_OBJC_TYPE(RTCMediaSource) <RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Calling this function will cause frames to be scaled down to the
+ * requested resolution. Also, frames will be cropped to match the
+ * requested aspect ratio, and frames will be dropped to match the
+ * requested fps. The requested aspect ratio is orientation agnostic and
+ * will be adjusted to maintain the input orientation, so it doesn't
+ * matter if e.g. 1280x720 or 720x1280 is requested.
+ */
+- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm
new file mode 100644
index 0000000000..486ca93771
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoSource+Private.h"
+
+#include "pc/video_track_source_proxy.h"
+#include "rtc_base/checks.h"
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
+    const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
+  webrtc::VideoTrackSourceProxy *proxy_source =
+      static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());  // NOTE(review): unchecked downcast — assumes every source here is proxy-wrapped (true for sources built by this class).
+  return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
+}
+
+// TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
+// RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
+// info.
+@implementation RTC_OBJC_TYPE (RTCVideoSource) {
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeVideoSource:
+ (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+ RTC_DCHECK(factory);
+ RTC_DCHECK(nativeVideoSource);
+ if (self = [super initWithFactory:factory
+ nativeMediaSource:nativeVideoSource
+ type:RTCMediaSourceTypeVideo]) {
+ _nativeVideoSource = nativeVideoSource;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+ type:(RTCMediaSourceType)type {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread {
+ return [self initWithFactory:factory
+ signalingThread:signalingThread
+ workerThread:workerThread
+ isScreenCast:NO];
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ signalingThread:(rtc::Thread *)signalingThread
+ workerThread:(rtc::Thread *)workerThread
+ isScreenCast:(BOOL)isScreenCast {
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(isScreenCast);
+
+ return [self initWithFactory:factory
+ nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
+ signalingThread, workerThread, objCVideoTrackSource)];
+}
+
+- (NSString *)description {
+ NSString *stateString = [[self class] stringForState:self.state];
+ return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCVideoSource)( %p ): %@", self, stateString];
+}
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
+}
+
+- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
+ getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
+ return _nativeVideoSource;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
new file mode 100644
index 0000000000..f1a8d7e4ed
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoTrack.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCVideoTrack)
+()
+
+ /** VideoTrackInterface created or passed in at construction. */
+ @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
+
+/** Initialize an RTCVideoTrack with its source and an id. */
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h
new file mode 100644
index 0000000000..5382b7169f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMediaStreamTrack.h"
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoSource);
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
+
+/** The video source for this video track. */
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Register a renderer that will render all frames received on this track. */
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+/** Deregister a renderer. */
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm
new file mode 100644
index 0000000000..fb015c6207
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/peerconnection/RTCVideoTrack.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoTrack+Private.h"
+
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "api/RTCVideoRendererAdapter+Private.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoTrack) {
+ rtc::Thread *_workerThread;
+ NSMutableArray *_adapters /* accessed on _workerThread */;
+}
+
+@synthesize source = _source;
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+ trackId:(NSString *)trackId {
+ NSParameterAssert(factory);
+ NSParameterAssert(source);
+ NSParameterAssert(trackId.length);
+ std::string nativeId = [NSString stdStringForString:trackId];
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+ factory.nativeFactory->CreateVideoTrack(nativeId, source.nativeVideoSource.get());
+ if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
+ _source = source;
+ }
+ return self;
+}
+
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+ nativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
+ type:(RTCMediaStreamTrackType)type {
+ NSParameterAssert(factory);
+ NSParameterAssert(nativeMediaTrack);
+ NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
+ if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) {
+ _adapters = [NSMutableArray array];
+ _workerThread = factory.workerThread;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ for (RTCVideoRendererAdapter *adapter in _adapters) {
+ self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
+ }
+}
+
+- (RTC_OBJC_TYPE(RTCVideoSource) *)source {
+ if (!_source) {
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source(
+ self.nativeVideoTrack->GetSource());
+ if (source) {
+ _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
+ nativeVideoSource:source];
+ }
+ }
+ return _source;
+}
+
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {  // Attaches a renderer as a native sink; idempotent per renderer.
+  if (!_workerThread->IsCurrent()) {
+    _workerThread->Invoke<void>(RTC_FROM_HERE, [renderer, self] { [self addRenderer:renderer]; });  // Hop to the worker thread: _adapters is only touched there.
+    return;
+  }
+
+  // Make sure we don't have this renderer yet.
+  for (RTCVideoRendererAdapter *adapter in _adapters) {
+    if (adapter.videoRenderer == renderer) {  // Identity comparison, not isEqual:.
+      RTC_LOG(LS_INFO) << "|renderer| is already attached to this track";
+      return;
+    }
+  }
+  // Create a wrapper that provides a native pointer for us.
+  RTCVideoRendererAdapter* adapter =
+      [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
+  [_adapters addObject:adapter];  // Keeps the adapter (and thus the sink) alive for the track's lifetime.
+  self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
+                                         rtc::VideoSinkWants());
+}
+
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {  // Detaches a previously added renderer; no-op (with a log) if it was never added.
+  if (!_workerThread->IsCurrent()) {
+    _workerThread->Invoke<void>(RTC_FROM_HERE,
+                                [renderer, self] { [self removeRenderer:renderer]; });  // Hop to the worker thread: _adapters is only touched there.
+    return;
+  }
+  __block NSUInteger indexToRemove = NSNotFound;
+  [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
+                                          NSUInteger idx,
+                                          BOOL *stop) {
+    if (adapter.videoRenderer == renderer) {  // Identity comparison, matching addRenderer:.
+      indexToRemove = idx;
+      *stop = YES;
+    }
+  }];
+  if (indexToRemove == NSNotFound) {
+    RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added";
+    return;
+  }
+  RTCVideoRendererAdapter *adapterToRemove =
+      [_adapters objectAtIndex:indexToRemove];
+  self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);  // Unhook the native sink before dropping the adapter.
+  [_adapters removeObjectAtIndex:indexToRemove];
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
+ return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
+ static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get()));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h
new file mode 100644
index 0000000000..8b17a75aef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_EXTERN NSString* const kRTCVideoCodecVp8Name;
+RTC_EXTERN NSString* const kRTCVideoCodecVp9Name;
+RTC_EXTERN NSString* const kRTCVideoCodecAv1Name;
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm
new file mode 100644
index 0000000000..1ab236a2c2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoCodecConstants.h"
+
+#include "media/base/media_constants.h"
+
+NSString *const kRTCVideoCodecVp8Name = @(cricket::kVp8CodecName);
+NSString *const kRTCVideoCodecVp9Name = @(cricket::kVp9CodecName);
+NSString *const kRTCVideoCodecAv1Name = @(cricket::kAv1CodecName);
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h
new file mode 100644
index 0000000000..3f6a689564
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderAV1) : NSObject
+
+/* This returns a AV1 decoder that can be returned from a RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)av1Decoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm
new file mode 100644
index 0000000000..81f5f93eec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderAV1.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderAV1.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderAV1)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)av1Decoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::CreateDav1dDecoder())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
new file mode 100644
index 0000000000..a118b25ed7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP8) : NSObject
+
+/* This returns a VP8 decoder that can be returned from a RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
new file mode 100644
index 0000000000..c150cf6d3a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderVP8.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP8)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP8Decoder::Create())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
new file mode 100644
index 0000000000..de7e62012b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP9) : NSObject
+
+/* This returns a VP9 decoder that can be returned from a RTCVideoDecoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
new file mode 100644
index 0000000000..05446d436d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderVP9.h"
+#import "RTCWrappedNativeVideoDecoder.h"
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP9)
+
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder {
+ std::unique_ptr<webrtc::VideoDecoder> nativeDecoder(webrtc::VP9Decoder::Create());
+ if (nativeDecoder == nullptr) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc]
+ initWithNativeDecoder:std::move(nativeDecoder)];
+}
+
++ (bool)isSupported {
+#if defined(RTC_ENABLE_VP9)
+ return true;
+#else
+ return false;
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h
new file mode 100644
index 0000000000..8aa55e4bfa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderAV1) : NSObject
+
+/* This returns an AV1 encoder that can be returned from a RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)av1Encoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm
new file mode 100644
index 0000000000..d2fe65293b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderAV1.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderAV1)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)av1Encoder {
+ std::unique_ptr<webrtc::VideoEncoder> nativeEncoder(webrtc::CreateLibaomAv1Encoder());
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::move(nativeEncoder)];
+}
+
++ (bool)isSupported {
+ return true;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
new file mode 100644
index 0000000000..e136a5bda8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP8) : NSObject
+
+/* This returns a VP8 encoder that can be returned from a RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
new file mode 100644
index 0000000000..d72f705813
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderVP8.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP8)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder {
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP8Encoder::Create())];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
new file mode 100644
index 0000000000..f7dac6117d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP9) : NSObject
+
+/* This returns a VP9 encoder that can be returned from a RTCVideoEncoderFactory injected into
+ * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be
+ * used independently from the RTCPeerConnectionFactory.
+ */
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder;
+
++ (bool)isSupported;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
new file mode 100644
index 0000000000..18a9353f7e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderVP9.h"
+#import "RTCWrappedNativeVideoEncoder.h"
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9)
+
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder {
+ std::unique_ptr<webrtc::VideoEncoder> nativeEncoder(webrtc::VP9Encoder::Create());
+ if (nativeEncoder == nullptr) {
+ return nil;
+ }
+ return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc]
+ initWithNativeEncoder:std::move(nativeEncoder)];
+}
+
++ (bool)isSupported {
+#if defined(RTC_ENABLE_VP9)
+ return true;
+#else
+ return false;
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
new file mode 100644
index 0000000000..3a9b39e959
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+
+#include "api/video_codecs/video_decoder.h"
+#include "media/base/codec.h"
+
+@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) : NSObject <RTC_OBJC_TYPE (RTCVideoDecoder)>
+
+- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder;
+
+/* This moves the ownership of the wrapped decoder to the caller. */
+- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
new file mode 100644
index 0000000000..261874d20b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCWrappedNativeVideoDecoder.h"
+#import "base/RTCMacros.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) {
+ std::unique_ptr<webrtc::VideoDecoder> _wrappedDecoder;
+}
+
+- (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder {
+ if (self = [super init]) {
+ _wrappedDecoder = std::move(decoder);
+ }
+
+ return self;
+}
+
+- (std::unique_ptr<webrtc::VideoDecoder>)releaseWrappedDecoder {
+ return std::move(_wrappedDecoder);
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoDecoder)
+
+- (void)setCallback:(RTCVideoDecoderCallback)callback {
+ RTC_DCHECK_NOTREACHED();
+}
+
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)releaseDecoder {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSString *)implementationName {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
new file mode 100644
index 0000000000..8df9ceec35
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoEncoder.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/codec.h"
+
+@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) : NSObject <RTC_OBJC_TYPE (RTCVideoEncoder)>
+
+- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder;
+
+/* This moves the ownership of the wrapped encoder to the caller. */
+- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
new file mode 100644
index 0000000000..4160572814
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCWrappedNativeVideoEncoder.h"
+#import "base/RTCMacros.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) {
+ std::unique_ptr<webrtc::VideoEncoder> _wrappedEncoder;
+}
+
+- (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder {
+ if (self = [super init]) {
+ _wrappedEncoder = std::move(encoder);
+ }
+
+ return self;
+}
+
+- (std::unique_ptr<webrtc::VideoEncoder>)releaseWrappedEncoder {
+ return std::move(_wrappedEncoder);
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoEncoder)
+
+- (void)setCallback:(RTCVideoEncoderCallback)callback {
+ RTC_DCHECK_NOTREACHED();
+}
+
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)releaseEncoder {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ frameTypes:(NSArray<NSNumber *> *)frameTypes {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+- (NSString *)implementationName {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
+ RTC_DCHECK_NOTREACHED();
+ return nil;
+}
+
+- (NSInteger)resolutionAlignment {
+ RTC_DCHECK_NOTREACHED();
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ RTC_DCHECK_NOTREACHED();
+ return NO;
+}
+
+- (BOOL)supportsNativeHandle {
+ RTC_DCHECK_NOTREACHED();
+ return NO;
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
new file mode 100644
index 0000000000..20dc807991
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeI420Buffer.h"
+
+#include "api/video/i420_buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCI420Buffer)
+() {
+ @protected
+ rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
+}
+
+/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
+- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
new file mode 100644
index 0000000000..3afe2090a2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCI420Buffer.h"
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCI420Buffer implements the RTCI420Buffer protocol */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCI420Buffer) : NSObject<RTC_OBJC_TYPE(RTCI420Buffer)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
new file mode 100644
index 0000000000..d38d72cfd2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeI420Buffer+Private.h"
+
+#include "api/video/i420_buffer.h"
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#include "libyuv/include/libyuv.h"
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCI420Buffer)
+
+- (instancetype)initWithWidth:(int)width height:(int)height {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Create(width, height);
+ }
+
+ return self;
+}
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ dataY:(const uint8_t *)dataY
+ dataU:(const uint8_t *)dataU
+ dataV:(const uint8_t *)dataV {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Copy(
+ width, height, dataY, width, dataU, (width + 1) / 2, dataV, (width + 1) / 2);
+ }
+ return self;
+}
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ strideY:(int)strideY
+ strideU:(int)strideU
+ strideV:(int)strideV {
+ if (self = [super init]) {
+ _i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
+ }
+
+ return self;
+}
+
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
+ if (self = [super init]) {
+ _i420Buffer = i420Buffer;
+ }
+
+ return self;
+}
+
+- (int)width {
+ return _i420Buffer->width();
+}
+
+- (int)height {
+ return _i420Buffer->height();
+}
+
+- (int)strideY {
+ return _i420Buffer->StrideY();
+}
+
+- (int)strideU {
+ return _i420Buffer->StrideU();
+}
+
+- (int)strideV {
+ return _i420Buffer->StrideV();
+}
+
+- (int)chromaWidth {
+ return _i420Buffer->ChromaWidth();
+}
+
+- (int)chromaHeight {
+ return _i420Buffer->ChromaHeight();
+}
+
+- (const uint8_t *)dataY {
+ return _i420Buffer->DataY();
+}
+
+- (const uint8_t *)dataU {
+ return _i420Buffer->DataU();
+}
+
+- (const uint8_t *)dataV {
+ return _i420Buffer->DataV();
+}
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
+ return self;
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::I420BufferInterface>)nativeI420Buffer {
+ return _i420Buffer;
+}
+
+#pragma mark - Debugging
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+- (id)debugQuickLookObject {
+ UIGraphicsBeginImageContext(CGSizeMake(_i420Buffer->width(), _i420Buffer->height()));
+ CGContextRef c = UIGraphicsGetCurrentContext();
+ uint8_t *ctxData = (uint8_t *)CGBitmapContextGetData(c);
+
+ libyuv::I420ToARGB(_i420Buffer->DataY(),
+ _i420Buffer->StrideY(),
+ _i420Buffer->DataU(),
+ _i420Buffer->StrideU(),
+ _i420Buffer->DataV(),
+ _i420Buffer->StrideV(),
+ ctxData,
+ CGBitmapContextGetBytesPerRow(c),
+ CGBitmapContextGetWidth(c),
+ CGBitmapContextGetHeight(c));
+
+ UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
+ UIGraphicsEndImageContext();
+
+ return image;
+}
+#endif
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
new file mode 100644
index 0000000000..053a10a304
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCMutableI420Buffer.h"
+#import "RTCNativeI420Buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Mutable version of RTCI420Buffer */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMutableI420Buffer) : RTC_OBJC_TYPE(RTCI420Buffer)<RTC_OBJC_TYPE(RTCMutableI420Buffer)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
new file mode 100644
index 0000000000..1e669bcb9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeMutableI420Buffer.h"
+
+#import "RTCNativeI420Buffer+Private.h"
+
+#include "api/video/i420_buffer.h"
+
+@implementation RTC_OBJC_TYPE (RTCMutableI420Buffer)
+
+- (uint8_t *)mutableDataY {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
+}
+
+- (uint8_t *)mutableDataU {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
+}
+
+- (uint8_t *)mutableDataV {
+ return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h b/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h
new file mode 100644
index 0000000000..5e7800e524
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCCodecSpecificInfo.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Implement this protocol to pass codec specific info from the encoder.
+ * Corresponds to webrtc::CodecSpecificInfo.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCCodecSpecificInfo)<NSObject> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h
new file mode 100644
index 0000000000..28529e5906
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Represents an encoded frame's type. */
+typedef NS_ENUM(NSUInteger, RTCFrameType) {
+ RTCFrameTypeEmptyFrame = 0,
+ RTCFrameTypeAudioFrameSpeech = 1,
+ RTCFrameTypeAudioFrameCN = 2,
+ RTCFrameTypeVideoFrameKey = 3,
+ RTCFrameTypeVideoFrameDelta = 4,
+};
+
+typedef NS_ENUM(NSUInteger, RTCVideoContentType) {
+ RTCVideoContentTypeUnspecified,
+ RTCVideoContentTypeScreenshare,
+};
+
+/** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCEncodedImage) : NSObject
+
+@property(nonatomic, strong) NSData *buffer;
+@property(nonatomic, assign) int32_t encodedWidth;
+@property(nonatomic, assign) int32_t encodedHeight;
+@property(nonatomic, assign) uint32_t timeStamp;
+@property(nonatomic, assign) int64_t captureTimeMs;
+@property(nonatomic, assign) int64_t ntpTimeMs;
+@property(nonatomic, assign) uint8_t flags;
+@property(nonatomic, assign) int64_t encodeStartMs;
+@property(nonatomic, assign) int64_t encodeFinishMs;
+@property(nonatomic, assign) RTCFrameType frameType;
+@property(nonatomic, assign) RTCVideoRotation rotation;
+@property(nonatomic, strong) NSNumber *qp;
+@property(nonatomic, assign) RTCVideoContentType contentType;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m
new file mode 100644
index 0000000000..ad8441aabd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCEncodedImage.m
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEncodedImage.h"
+
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+
+@synthesize buffer = _buffer;
+@synthesize encodedWidth = _encodedWidth;
+@synthesize encodedHeight = _encodedHeight;
+@synthesize timeStamp = _timeStamp;
+@synthesize captureTimeMs = _captureTimeMs;
+@synthesize ntpTimeMs = _ntpTimeMs;
+@synthesize flags = _flags;
+@synthesize encodeStartMs = _encodeStartMs;
+@synthesize encodeFinishMs = _encodeFinishMs;
+@synthesize frameType = _frameType;
+@synthesize rotation = _rotation;
+@synthesize qp = _qp;
+@synthesize contentType = _contentType;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h b/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h
new file mode 100644
index 0000000000..b97f05a5ba
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCI420Buffer.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Protocol for RTCYUVPlanarBuffers containing I420 data */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCLogging.h b/third_party/libwebrtc/sdk/objc/base/RTCLogging.h
new file mode 100644
index 0000000000..754945c8f2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCLogging.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+// Subset of rtc::LoggingSeverity.
+typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
+ RTCLoggingSeverityVerbose,
+ RTCLoggingSeverityInfo,
+ RTCLoggingSeverityWarning,
+ RTCLoggingSeverityError,
+ RTCLoggingSeverityNone,
+};
+
+// Wrapper for C++ RTC_LOG(sev) macros.
+// Logs the log string to the webrtc logstream for the given severity.
+RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
+
+// Wrapper for rtc::LogMessage::LogToDebug.
+// Sets the minimum severity to be logged to console.
+RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+
+// Returns the filename with the path prefix removed.
+RTC_EXTERN NSString* RTCFileName(const char* filePath);
+
+// Some convenience macros.
+
+#define RTCLogString(format, ...) \
+ [NSString stringWithFormat:@"(%@:%d %s): " format, RTCFileName(__FILE__), \
+ __LINE__, __FUNCTION__, ##__VA_ARGS__]
+
+#define RTCLogFormat(severity, format, ...) \
+ do { \
+ NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \
+ RTCLogEx(severity, log_string); \
+ } while (false)
+
+#define RTCLogVerbose(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__)
+
+#define RTCLogInfo(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__)
+
+#define RTCLogWarning(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__)
+
+#define RTCLogError(format, ...) \
+ RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__)
+
+#if !defined(NDEBUG)
+#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
+#else
+#define RTCLogDebug(format, ...) \
+ do { \
+ } while (false)
+#endif
+
+#define RTCLog(format, ...) RTCLogInfo(format, ##__VA_ARGS__)
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm b/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm
new file mode 100644
index 0000000000..e8dae02efb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCLogging.mm
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCLogging.h"
+
+#include "rtc_base/logging.h"
+
+rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
+ switch (severity) {
+ case RTCLoggingSeverityVerbose:
+ return rtc::LS_VERBOSE;
+ case RTCLoggingSeverityInfo:
+ return rtc::LS_INFO;
+ case RTCLoggingSeverityWarning:
+ return rtc::LS_WARNING;
+ case RTCLoggingSeverityError:
+ return rtc::LS_ERROR;
+ case RTCLoggingSeverityNone:
+ return rtc::LS_NONE;
+ }
+}
+
+void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
+ if (log_string.length) {
+ const char* utf8_string = log_string.UTF8String;
+ RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
+ }
+}
+
+void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
+ rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
+}
+
+NSString* RTCFileName(const char* file_path) {
+ NSString* ns_file_path =
+ [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
+ length:strlen(file_path)
+ encoding:NSUTF8StringEncoding
+ freeWhenDone:NO];
+ return ns_file_path.lastPathComponent;
+}
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMacros.h b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h
new file mode 100644
index 0000000000..469e3c93bd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_BASE_RTCMACROS_H_
+#define SDK_OBJC_BASE_RTCMACROS_H_
+
+#ifdef WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT
+
+#if defined(WEBRTC_LIBRARY_IMPL)
+#define RTC_OBJC_EXPORT __attribute__((visibility("default")))
+#endif
+
+#endif // WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT
+
+#ifndef RTC_OBJC_EXPORT
+#define RTC_OBJC_EXPORT
+#endif
+
+// Internal macros used to correctly concatenate symbols.
+#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
+#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
+
+// RTC_OBJC_TYPE_PREFIX
+//
+// Macro used to prepend a prefix to the API types that are exported with
+// RTC_OBJC_EXPORT.
+//
+// Clients can patch the definition of this macro locally and build
+// WebRTC.framework with their own prefix in case symbol clashing is a
+// problem.
+//
+// This macro must only be defined here and not via a compiler flag to
+// ensure it has a unique value.
+#define RTC_OBJC_TYPE_PREFIX
+
+// RTC_OBJC_TYPE
+//
+// Macro used internally to declare API types. Declaring an API type without
+// using this macro will not include the declared type in the set of types
+// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX.
+#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
+
+#if defined(__cplusplus)
+#define RTC_EXTERN extern "C" RTC_OBJC_EXPORT
+#else
+#define RTC_EXTERN extern RTC_OBJC_EXPORT
+#endif
+
+#ifdef __OBJC__
+#define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname
+#else
+#define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname
+#endif
+
+#endif // SDK_OBJC_BASE_RTCMACROS_H_
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h b/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h
new file mode 100644
index 0000000000..cde721980b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMutableI420Buffer.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCI420Buffer.h"
+#import "RTCMutableYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Extension of the I420 buffer with mutable data access */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCMutableI420Buffer)<RTC_OBJC_TYPE(RTCI420Buffer), RTC_OBJC_TYPE(RTCMutableYUVPlanarBuffer)> @end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
new file mode 100644
index 0000000000..bd14e3bca3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCYUVPlanarBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Extension of the YUV planar data buffer with mutable data access */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCMutableYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)>
+
+ @property(nonatomic, readonly) uint8_t *mutableDataY;
+@property(nonatomic, readonly) uint8_t *mutableDataU;
+@property(nonatomic, readonly) uint8_t *mutableDataV;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h b/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h
new file mode 100644
index 0000000000..53da0cceff
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCSSLCertificateVerifier.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE
+(RTCSSLCertificateVerifier)<NSObject>
+
+ /** Verifies the given DER-encoded certificate. */
+ - (BOOL)verify : (NSData *)derCertificate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h
new file mode 100644
index 0000000000..a1ffdcf38e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoCapturerDelegate)<NSObject> -
+ (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame
+ : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoCapturer) : NSObject
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> delegate;
+
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m
new file mode 100644
index 0000000000..ca31a731f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCapturer.m
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCapturer.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCapturer)
+
+@synthesize delegate = _delegate;
+
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ if (self = [super init]) {
+ _delegate = delegate;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h
new file mode 100644
index 0000000000..fa28958f25
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject <NSCoding>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+- (instancetype)initWithName:(NSString *)name;
+
+- (instancetype)initWithName:(NSString *)name
+ parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters
+ NS_DESIGNATED_INITIALIZER;
+
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+
+@property(nonatomic, readonly) NSString *name;
+@property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *parameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m
new file mode 100644
index 0000000000..ce26ae1de3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoCodecInfo.m
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoCodecInfo.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+
+@synthesize name = _name;
+@synthesize parameters = _parameters;
+
+- (instancetype)initWithName:(NSString *)name {
+ return [self initWithName:name parameters:nil];
+}
+
+- (instancetype)initWithName:(NSString *)name
+ parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
+ if (self = [super init]) {
+ _name = name;
+ _parameters = (parameters ? parameters : @{});
+ }
+
+ return self;
+}
+
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if (!info ||
+ ![self.name isEqualToString:info.name] ||
+ ![self.parameters isEqualToDictionary:info.parameters]) {
+ return NO;
+ }
+ return YES;
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object)
+ return YES;
+ if (![object isKindOfClass:[self class]])
+ return NO;
+ return [self isEqualToCodecInfo:object];
+}
+
+- (NSUInteger)hash {
+ return [self.name hash] ^ [self.parameters hash];
+}
+
+#pragma mark - NSCoding
+
+- (instancetype)initWithCoder:(NSCoder *)decoder {
+ return [self initWithName:[decoder decodeObjectForKey:@"name"]
+ parameters:[decoder decodeObjectForKey:@"parameters"]];
+}
+
+- (void)encodeWithCoder:(NSCoder *)encoder {
+ [encoder encodeObject:_name forKey:@"name"];
+ [encoder encodeObject:_parameters forKey:@"parameters"];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h
new file mode 100644
index 0000000000..ccddd42d42
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoder.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCEncodedImage.h"
+#import "RTCMacros.h"
+#import "RTCVideoEncoderSettings.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Callback block for decoder. */
+typedef void (^RTCVideoDecoderCallback)(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
+
+/** Protocol for decoder implementations. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoder)<NSObject>
+
+ - (void)setCallback : (RTCVideoDecoderCallback)callback;
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores;
+- (NSInteger)releaseDecoder;
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs;
+- (NSString *)implementationName;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h
new file mode 100644
index 0000000000..8d90138521
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoDecoderFactory.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoDecoder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory)<NSObject>
+
+ - (nullable id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder
+ : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+ supportedCodecs; // TODO(andersc): "supportedFormats" instead?
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h
new file mode 100644
index 0000000000..2445d432d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoder.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCEncodedImage.h"
+#import "RTCMacros.h"
+#import "RTCVideoEncoderQpThresholds.h"
+#import "RTCVideoEncoderSettings.h"
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Callback block for encoder. */
+typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame,
+ id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> info);
+
+/** Protocol for encoder implementations. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoder)<NSObject>
+
+- (void)setCallback:(nullable RTCVideoEncoderCallback)callback;
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores;
+- (NSInteger)releaseEncoder;
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ frameTypes:(NSArray<NSNumber *> *)frameTypes;
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
+- (NSString *)implementationName;
+
+/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to
+ * keep the QP from the encoded images within the given range. Returning nil from this function
+ * disables quality scaling. */
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings;
+
+/** Resolutions should be aligned to this value. */
+@property(nonatomic, readonly) NSInteger resolutionAlignment;
+
+/** If enabled, resolution alignment is applied to all simulcast layers simultaneously so that when
+ scaled, all resolutions comply with 'resolutionAlignment'. */
+@property(nonatomic, readonly) BOOL applyAlignmentToAllSimulcastLayers;
+
+/** If YES, the receiver is expected to resample/scale the source texture to the expected output
+ size. */
+@property(nonatomic, readonly) BOOL supportsNativeHandle;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h
new file mode 100644
index 0000000000..a73cd77990
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderFactory.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoEncoder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoEncoderSelector is an Objective-C version of
+ webrtc::VideoEncoderFactory::EncoderSelectorInterface.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderSelector)<NSObject>
+
+ - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder;
+
+@optional
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize:(CGSize)size;
+
+@end
+
+/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory)<NSObject>
+
+ - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder
+ : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+ supportedCodecs; // TODO(andersc): "supportedFormats" instead?
+
+@optional
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)implementations;
+- (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)>)encoderSelector;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h
new file mode 100644
index 0000000000..1a6e9e88ab
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject
+
+- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high;
+
+@property(nonatomic, readonly) NSInteger low;
+@property(nonatomic, readonly) NSInteger high;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m
new file mode 100644
index 0000000000..fb7012f44f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderQpThresholds.m
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderQpThresholds.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds)
+
+@synthesize low = _low;
+@synthesize high = _high;
+
+- (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high {
+ if (self = [super init]) {
+ _low = low;
+ _high = high;
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h
new file mode 100644
index 0000000000..ae792eab71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) {
+ RTCVideoCodecModeRealtimeVideo,
+ RTCVideoCodecModeScreensharing,
+};
+
+/** Settings for encoder. Corresponds to webrtc::VideoCodec. */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) : NSObject
+
+@property(nonatomic, strong) NSString *name;
+
+@property(nonatomic, assign) unsigned short width;
+@property(nonatomic, assign) unsigned short height;
+
+@property(nonatomic, assign) unsigned int startBitrate; // kilobits/sec.
+@property(nonatomic, assign) unsigned int maxBitrate;
+@property(nonatomic, assign) unsigned int minBitrate;
+
+@property(nonatomic, assign) uint32_t maxFramerate;
+
+@property(nonatomic, assign) unsigned int qpMax;
+@property(nonatomic, assign) RTCVideoCodecMode mode;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m
new file mode 100644
index 0000000000..f66cd2cf77
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoEncoderSettings.m
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderSettings.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+
+@synthesize name = _name;
+@synthesize width = _width;
+@synthesize height = _height;
+@synthesize startBitrate = _startBitrate;
+@synthesize maxBitrate = _maxBitrate;
+@synthesize minBitrate = _minBitrate;
+@synthesize maxFramerate = _maxFramerate;
+@synthesize qpMax = _qpMax;
+@synthesize mode = _mode;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h
new file mode 100644
index 0000000000..f5638d27cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(NSInteger, RTCVideoRotation) {
+ RTCVideoRotation_0 = 0,
+ RTCVideoRotation_90 = 90,
+ RTCVideoRotation_180 = 180,
+ RTCVideoRotation_270 = 270,
+};
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
+
+// RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject
+
+/** Width without rotation applied. */
+@property(nonatomic, readonly) int width;
+
+/** Height without rotation applied. */
+@property(nonatomic, readonly) int height;
+@property(nonatomic, readonly) RTCVideoRotation rotation;
+
+/** Timestamp in nanoseconds. */
+@property(nonatomic, readonly) int64_t timeStampNs;
+
+/** Timestamp 90 kHz. */
+@property(nonatomic, assign) int32_t timeStamp;
+
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> buffer;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (instancetype) new NS_UNAVAILABLE;
+
+/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp.
+ * Deprecated - initialize with a RTCCVPixelBuffer instead
+ */
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs
+ DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and
+ * scaling. Cropping will be applied first on the pixel buffer, followed by
+ * scaling to the final resolution of scaledWidth x scaledHeight.
+ */
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ scaledWidth:(int)scaledWidth
+ scaledHeight:(int)scaledHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs
+ DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
+ */
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)frameBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs;
+
+/** Return a frame that is guaranteed to be I420, i.e. it is possible to access
+ * the YUV data on it.
+ */
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm
new file mode 100644
index 0000000000..e162238d73
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrame.mm
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoFrame.h"
+
+#import "RTCI420Buffer.h"
+#import "RTCVideoFrameBuffer.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoFrame) {
+ RTCVideoRotation _rotation;
+ int64_t _timeStampNs;
+}
+
+@synthesize buffer = _buffer;
+@synthesize timeStamp;
+
+- (int)width {
+ return _buffer.width;
+}
+
+- (int)height {
+ return _buffer.height;
+}
+
+- (RTCVideoRotation)rotation {
+ return _rotation;
+}
+
+- (int64_t)timeStampNs {
+ return _timeStampNs;
+}
+
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame {
+ return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:[_buffer toI420]
+ rotation:_rotation
+ timeStampNs:_timeStampNs];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+ // Deprecated.
+ return nil;
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ scaledWidth:(int)scaledWidth
+ scaledHeight:(int)scaledHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+ // Deprecated.
+ return nil;
+}
+
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)buffer
+ rotation:(RTCVideoRotation)rotation
+ timeStampNs:(int64_t)timeStampNs {
+ if (self = [super init]) {
+ _buffer = buffer;
+ _rotation = rotation;
+ _timeStampNs = timeStampNs;
+ }
+
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h
new file mode 100644
index 0000000000..82d057eea0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoFrameBuffer.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer);
+
+// RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer.
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer)<NSObject>
+
+ @property(nonatomic, readonly) int width;
+@property(nonatomic, readonly) int height;
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h b/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h
new file mode 100644
index 0000000000..0f763295ad
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCVideoRenderer.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCVideoFrame);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer)<NSObject>
+
+ /** The size of the frame. */
+ - (void)setSize : (CGSize)size;
+
+/** The frame to be displayed. */
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+@end
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewDelegate)
+
+ - (void)videoView : (id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize
+ : (CGSize)size;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h b/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h
new file mode 100644
index 0000000000..be01b915f5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/base/RTCYUVPlanarBuffer.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrameBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Protocol for RTCVideoFrameBuffers containing YUV planar data. */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
+
+ @property(nonatomic, readonly) int chromaWidth;
+@property(nonatomic, readonly) int chromaHeight;
+@property(nonatomic, readonly) const uint8_t *dataY;
+@property(nonatomic, readonly) const uint8_t *dataU;
+@property(nonatomic, readonly) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;
+
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ dataY:(const uint8_t *)dataY
+ dataU:(const uint8_t *)dataU
+ dataV:(const uint8_t *)dataV;
+- (instancetype)initWithWidth:(int)width height:(int)height;
+- (instancetype)initWithWidth:(int)width
+ height:(int)height
+ strideY:(int)strideY
+ strideU:(int)strideU
+ strideV:(int)strideV;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
new file mode 100644
index 0000000000..449f31e9dd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession+Private.h"
+#import "RTCAudioSessionConfiguration.h"
+
+#import "base/RTCLogging.h"
+
+@implementation RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
+
+ - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
+ : (NSError **)outError {
+ return [self setConfiguration:configuration
+ active:NO
+ shouldSetActive:NO
+ error:outError];
+}
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:active
+ shouldSetActive:YES
+ error:outError];
+}
+
+#pragma mark - Private
+
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ shouldSetActive:(BOOL)shouldSetActive
+ error:(NSError **)outError {
+ NSParameterAssert(configuration);
+ if (outError) {
+ *outError = nil;
+ }
+
+ // Provide an error even if there isn't one so we can log it. We will not
+ // return immediately on error in this function and instead try to set
+ // everything we can.
+ NSError *error = nil;
+
+ if (self.category != configuration.category ||
+ self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category to: %@", configuration.category);
+ }
+ }
+
+ if (self.mode != configuration.mode) {
+ NSError *modeError = nil;
+ if (![self setMode:configuration.mode error:&modeError]) {
+ RTCLogError(@"Failed to set mode: %@",
+ modeError.localizedDescription);
+ error = modeError;
+ } else {
+ RTCLog(@"Set mode to: %@", configuration.mode);
+ }
+ }
+
+ // Sometimes category options don't stick after setting mode.
+ if (self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category options: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category options to: %ld",
+ (long)configuration.categoryOptions);
+ }
+ }
+
+ if (self.preferredSampleRate != configuration.sampleRate) {
+ NSError *sampleRateError = nil;
+ if (![self setPreferredSampleRate:configuration.sampleRate
+ error:&sampleRateError]) {
+ RTCLogError(@"Failed to set preferred sample rate: %@",
+ sampleRateError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = sampleRateError;
+ }
+ } else {
+ RTCLog(@"Set preferred sample rate to: %.2f",
+ configuration.sampleRate);
+ }
+ }
+
+ if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
+ NSError *bufferDurationError = nil;
+ if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
+ error:&bufferDurationError]) {
+ RTCLogError(@"Failed to set preferred IO buffer duration: %@",
+ bufferDurationError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = bufferDurationError;
+ }
+ } else {
+ RTCLog(@"Set preferred IO buffer duration to: %f",
+ configuration.ioBufferDuration);
+ }
+ }
+
+ if (shouldSetActive) {
+ NSError *activeError = nil;
+ if (![self setActive:active error:&activeError]) {
+ RTCLogError(@"Failed to setActive to %d: %@",
+ active, activeError.localizedDescription);
+ error = activeError;
+ }
+ }
+
+ if (self.isActive &&
+ // TODO(tkchin): Figure out which category/mode numChannels is valid for.
+ [self.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
+ // Try to set the preferred number of hardware audio channels. These calls
+ // must be done after setting the audio session’s category and mode and
+ // activating the session.
+ NSInteger inputNumberOfChannels = configuration.inputNumberOfChannels;
+ if (self.inputNumberOfChannels != inputNumberOfChannels) {
+ NSError *inputChannelsError = nil;
+ if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
+ error:&inputChannelsError]) {
+ RTCLogError(@"Failed to set preferred input number of channels: %@",
+ inputChannelsError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = inputChannelsError;
+ }
+ } else {
+ RTCLog(@"Set input number of channels to: %ld",
+ (long)inputNumberOfChannels);
+ }
+ }
+ NSInteger outputNumberOfChannels = configuration.outputNumberOfChannels;
+ if (self.outputNumberOfChannels != outputNumberOfChannels) {
+ NSError *outputChannelsError = nil;
+ if (![self setPreferredOutputNumberOfChannels:outputNumberOfChannels
+ error:&outputChannelsError]) {
+ RTCLogError(@"Failed to set preferred output number of channels: %@",
+ outputChannelsError.localizedDescription);
+ if (!self.ignoresPreferredAttributeConfigurationErrors) {
+ error = outputChannelsError;
+ }
+ } else {
+ RTCLog(@"Set output number of channels to: %ld",
+ (long)outputNumberOfChannels);
+ }
+ }
+ }
+
+ if (outError) {
+ *outError = error;
+ }
+
+ return error == nil;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h
new file mode 100644
index 0000000000..2be1b9fb3d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession+Private.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+()
+
+ /** Number of times setActive:YES has succeeded without a balanced call to
+ * setActive:NO.
+ */
+ @property(nonatomic, readonly) int activationCount;
+
+/** The number of times `beginWebRTCSession` was called without a balanced call
+ * to `endWebRTCSession`.
+ */
+@property(nonatomic, readonly) int webRTCSessionCount;
+
+/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
+@property(readonly) BOOL canPlayOrRecord;
+
+/** Tracks whether we have been sent an interruption event that hasn't been matched by either an
+ * interrupted end event or a foreground event.
+ */
+@property(nonatomic, assign) BOOL isInterrupted;
+
+/** Adds the delegate to the list of delegates, and places it at the front of
+ * the list. This delegate will be notified before other delegates of
+ * audio events.
+ */
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+
+/** Signals RTCAudioSession that a WebRTC session is about to begin and
+ * audio configuration is needed. Will configure the audio session for WebRTC
+ * if not already configured and if configuration is not delayed.
+ * Successful calls must be balanced by a call to endWebRTCSession.
+ */
+- (BOOL)beginWebRTCSession:(NSError **)outError;
+
+/** Signals RTCAudioSession that a WebRTC session is about to end and audio
+ * unconfiguration is needed. Will unconfigure the audio session for WebRTC
+ * if this is the last unmatched call and if configuration is not delayed.
+ */
+- (BOOL)endWebRTCSession:(NSError **)outError;
+
+/** Configure the audio session for WebRTC. This call will fail if the session
+ * is already configured. On other failures, we will attempt to restore the
+ * previously used audio session configuration.
+ * `lockForConfiguration` must be called first.
+ * Successful calls to configureWebRTCSession must be matched by calls to
+ * `unconfigureWebRTCSession`.
+ */
+- (BOOL)configureWebRTCSession:(NSError **)outError;
+
+/** Unconfigures the session for WebRTC. This will attempt to restore the
+ * audio session to the settings used before `configureWebRTCSession` was
+ * called.
+ * `lockForConfiguration` must be called first.
+ */
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+
+/** Returns a configuration error with the given description. */
+- (NSError *)configurationErrorWithDescription:(NSString *)description;
+
+/** Notifies the receiver that a playout glitch was detected. */
+- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
+
+/** Notifies the receiver that there was an error when starting an audio unit. */
+- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error;
+
+// Properties and methods for tests.
+- (void)notifyDidBeginInterruption;
+- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
+- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
+- (void)notifyMediaServicesWereLost;
+- (void)notifyMediaServicesWereReset;
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+- (void)notifyDidStartPlayOrRecord;
+- (void)notifyDidStopPlayOrRecord;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h
new file mode 100644
index 0000000000..3b83b27ba5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.h
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+extern NSString *const kRTCAudioSessionErrorDomain;
+/** Method that requires lock was called without lock. */
+extern NSInteger const kRTCAudioSessionErrorLockRequired;
+/** Unknown configuration error occurred. */
+extern NSInteger const kRTCAudioSessionErrorConfiguration;
+
+@class RTC_OBJC_TYPE(RTCAudioSession);
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
+
+// Surfaces AVAudioSession events. WebRTC will listen directly for notifications
+// from AVAudioSession and handle them before calling these delegate methods,
+// at which point applications can perform additional processing if required.
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionDelegate)<NSObject>
+
+ @optional
+/** Called on a system notification thread when AVAudioSession starts an
+ * interruption event.
+ */
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a system notification thread when AVAudioSession ends an
+ * interruption event.
+ */
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession;
+
+/** Called on a system notification thread when AVAudioSession changes the
+ * route.
+ */
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
+
+/** Called on a system notification thread when AVAudioSession media server
+ * terminates.
+ */
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a system notification thread when AVAudioSession media server
+ * restarts.
+ */
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+
+/** Called on a WebRTC thread when the audio device is notified to begin
+ * playback or recording.
+ */
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called on a WebRTC thread when the audio device is notified to stop
+ * playback or recording.
+ */
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
+
+/** Called when the AVAudioSession output volume value changes. */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume;
+
+/** Called when the audio device detects a playout glitch. The argument is the
+ * number of glitches detected so far in the current audio playout session.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
+
+/** Called when the audio session is about to change the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active;
+
+/** Called after the audio session successfully changed the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active;
+
+/** Called after the audio session failed to change the active state.
+ */
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ failedToSetActive:(BOOL)active
+ error:(NSError *)error;
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ audioUnitStartFailedWithError:(NSError *)error;
+
+@end
+
+/** This is a protocol used to inform RTCAudioSession when the audio session
+ * activation state has changed outside of RTCAudioSession. The current known use
+ * case of this is when CallKit activates the audio session for the application.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionActivationDelegate)<NSObject>
+
+ /** Called when the audio session is activated outside of the app by iOS. */
+ - (void)audioSessionDidActivate : (AVAudioSession *)session;
+
+/** Called when the audio session is deactivated outside of the app by iOS. */
+- (void)audioSessionDidDeactivate:(AVAudioSession *)session;
+
+@end
+
+/** Proxy class for AVAudioSession that adds a locking mechanism similar to
+ * AVCaptureDevice. This is used so that interleaving configurations between
+ * WebRTC and the application layer are avoided.
+ *
+ * RTCAudioSession also coordinates activation so that the audio session is
+ * activated only once. See `setActive:error:`.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject <RTC_OBJC_TYPE(RTCAudioSessionActivationDelegate)>
+
+/** Convenience property to access the AVAudioSession singleton. Callers should
+ * not call setters on AVAudioSession directly, but other method invocations
+ * are fine.
+ */
+@property(nonatomic, readonly) AVAudioSession *session;
+
+/** Our best guess at whether the session is active based on results of calls to
+ * AVAudioSession.
+ */
+@property(nonatomic, readonly) BOOL isActive;
+
+/** If YES, WebRTC will not initialize the audio unit automatically when an
+ * audio track is ready for playout or recording. Instead, applications should
+ * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit
+ * as soon as an audio track is ready for playout or recording.
+ */
+@property(nonatomic, assign) BOOL useManualAudio;
+
+/** This property is only effective if useManualAudio is YES.
+ * Represents permission for WebRTC to initialize the VoIP audio unit.
+ * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be
+ * stopped and uninitialized. This will stop incoming and outgoing audio.
+ * When set to YES, WebRTC will initialize and start the audio unit when it is
+ * needed (e.g. due to establishing an audio connection).
+ * This property was introduced to work around an issue where if an AVPlayer is
+ * playing audio while the VoIP audio unit is initialized, its audio would be
+ * either cut off completely or played at a reduced volume. By preventing
+ * the audio unit from being initialized until after the audio has completed,
+ * we are able to prevent the abrupt cutoff.
+ */
+@property(nonatomic, assign) BOOL isAudioEnabled;
+
+// Proxy properties.
+@property(readonly) NSString *category;
+@property(readonly) AVAudioSessionCategoryOptions categoryOptions;
+@property(readonly) NSString *mode;
+@property(readonly) BOOL secondaryAudioShouldBeSilencedHint;
+@property(readonly) AVAudioSessionRouteDescription *currentRoute;
+@property(readonly) NSInteger maximumInputNumberOfChannels;
+@property(readonly) NSInteger maximumOutputNumberOfChannels;
+@property(readonly) float inputGain;
+@property(readonly) BOOL inputGainSettable;
+@property(readonly) BOOL inputAvailable;
+@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *inputDataSources;
+@property(readonly, nullable) AVAudioSessionDataSourceDescription *inputDataSource;
+@property(readonly, nullable) NSArray<AVAudioSessionDataSourceDescription *> *outputDataSources;
+@property(readonly, nullable) AVAudioSessionDataSourceDescription *outputDataSource;
+@property(readonly) double sampleRate;
+@property(readonly) double preferredSampleRate;
+@property(readonly) NSInteger inputNumberOfChannels;
+@property(readonly) NSInteger outputNumberOfChannels;
+@property(readonly) float outputVolume;
+@property(readonly) NSTimeInterval inputLatency;
+@property(readonly) NSTimeInterval outputLatency;
+@property(readonly) NSTimeInterval IOBufferDuration;
+@property(readonly) NSTimeInterval preferredIOBufferDuration;
+
+/**
+ When YES, calls to -setConfiguration:error: and -setConfiguration:active:error: ignore errors in
+ configuring the audio session's "preferred" attributes (e.g. preferredInputNumberOfChannels).
+ Typically, configurations to preferred attributes are optimizations, and ignoring this type of
+ configuration error allows code flow to continue along the happy path when these optimizations are
+ not available. The default value of this property is NO.
+ */
+@property(nonatomic) BOOL ignoresPreferredAttributeConfigurationErrors;
+
+/** Default constructor. */
++ (instancetype)sharedInstance;
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Adds a delegate, which is held weakly. */
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+/** Removes an added delegate. */
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
+
+/** Request exclusive access to the audio session for configuration. This call
+ * will block if the lock is held by another object.
+ */
+- (void)lockForConfiguration;
+/** Relinquishes exclusive access to the audio session. */
+- (void)unlockForConfiguration;
+
+/** If `active`, activates the audio session if it isn't already active.
+ * Successful calls must be balanced with a setActive:NO when activation is no
+ * longer required. If not `active`, deactivates the audio session if one is
+ * active and this is the last balanced call. When deactivating, the
+ * AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation option is passed to
+ * AVAudioSession.
+ */
+- (BOOL)setActive:(BOOL)active error:(NSError **)outError;
+
+// The following methods are proxies for the associated methods on
+// AVAudioSession. `lockForConfiguration` must be called before using them
+// otherwise they will fail with kRTCAudioSessionErrorLockRequired.
+
+- (BOOL)setCategory:(NSString *)category
+ withOptions:(AVAudioSessionCategoryOptions)options
+ error:(NSError **)outError;
+- (BOOL)setMode:(NSString *)mode error:(NSError **)outError;
+- (BOOL)setInputGain:(float)gain error:(NSError **)outError;
+- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError;
+- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration error:(NSError **)outError;
+- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
+- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count error:(NSError **)outError;
+- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride error:(NSError **)outError;
+- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort error:(NSError **)outError;
+- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError;
+- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+ error:(NSError **)outError;
+@end
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
+
+ /** Applies the configuration to the current session. Attempts to set all
+ * properties even if previous ones fail. Only the last error will be
+ * returned.
+ * `lockForConfiguration` must be called first.
+ */
+ - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
+ : (NSError **)outError;
+
+/** Convenience method that calls both setConfiguration and setActive.
+ * `lockForConfiguration` must be called first.
+ */
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm
new file mode 100644
index 0000000000..550a426d36
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSession.mm
@@ -0,0 +1,1000 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession+Private.h"
+
+#import <UIKit/UIKit.h>
+
+#include <atomic>
+#include <vector>
+
+#include "absl/base/attributes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/mutex.h"
+
+#import "RTCAudioSessionConfiguration.h"
+#import "base/RTCLogging.h"
+
+#if !defined(ABSL_HAVE_THREAD_LOCAL)
+#error ABSL_HAVE_THREAD_LOCAL should be defined for MacOS / iOS Targets.
+#endif
+
+NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
+NSInteger const kRTCAudioSessionErrorLockRequired = -1;
+NSInteger const kRTCAudioSessionErrorConfiguration = -2;
+NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
+
+namespace {
+// Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is locked,
+// we need a separate variable to check that the mutex is locked in the RTCAudioSession.
+ABSL_CONST_INIT thread_local bool mutex_locked = false;
+} // namespace
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+() @property(nonatomic,
+ readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
+@end
+
+// This class needs to be thread-safe because it is accessed from many threads.
+// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
+// lock contention so coarse locks should be fine for now.
+@implementation RTC_OBJC_TYPE (RTCAudioSession) {
+ webrtc::Mutex _mutex;
+ AVAudioSession *_session;
+ std::atomic<int> _activationCount;
+ std::atomic<int> _webRTCSessionCount;
+ BOOL _isActive;
+ BOOL _useManualAudio;
+ BOOL _isAudioEnabled;
+ BOOL _canPlayOrRecord;
+ BOOL _isInterrupted;
+}
+
+@synthesize session = _session;
+@synthesize delegates = _delegates;
+@synthesize ignoresPreferredAttributeConfigurationErrors =
+ _ignoresPreferredAttributeConfigurationErrors;
+
++ (instancetype)sharedInstance {
+ static dispatch_once_t onceToken;
+ static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil;
+ dispatch_once(&onceToken, ^{
+ sharedInstance = [[self alloc] init];
+ });
+ return sharedInstance;
+}
+
+- (instancetype)init {
+ return [self initWithAudioSession:[AVAudioSession sharedInstance]];
+}
+
+/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
+- (instancetype)initWithAudioSession:(id)audioSession {
+ if (self = [super init]) {
+ _session = audioSession;
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center addObserver:self
+ selector:@selector(handleInterruptionNotification:)
+ name:AVAudioSessionInterruptionNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleRouteChangeNotification:)
+ name:AVAudioSessionRouteChangeNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleMediaServicesWereLost:)
+ name:AVAudioSessionMediaServicesWereLostNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleMediaServicesWereReset:)
+ name:AVAudioSessionMediaServicesWereResetNotification
+ object:nil];
+ // Posted on the main thread when the primary audio from other applications
+ // starts and stops. Foreground applications may use this notification as a
+ // hint to enable or disable audio that is secondary.
+ [center addObserver:self
+ selector:@selector(handleSilenceSecondaryAudioHintNotification:)
+ name:AVAudioSessionSilenceSecondaryAudioHintNotification
+ object:nil];
+ // Also track foreground event in order to deal with interruption ended situation.
+ [center addObserver:self
+ selector:@selector(handleApplicationDidBecomeActive:)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+ [_session addObserver:self
+ forKeyPath:kRTCAudioSessionOutputVolumeSelector
+ options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
+ context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
+
+ RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ [_session removeObserver:self
+ forKeyPath:kRTCAudioSessionOutputVolumeSelector
+ context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
+ RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
+}
+
+- (NSString *)description {
+ NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
+ " category: %@\n"
+ " categoryOptions: %ld\n"
+ " mode: %@\n"
+ " isActive: %d\n"
+ " sampleRate: %.2f\n"
+ " IOBufferDuration: %f\n"
+ " outputNumberOfChannels: %ld\n"
+ " inputNumberOfChannels: %ld\n"
+ " outputLatency: %f\n"
+ " inputLatency: %f\n"
+ " outputVolume: %f\n"
+ "}";
+ NSString *description = [NSString stringWithFormat:format,
+ self.category, (long)self.categoryOptions, self.mode,
+ self.isActive, self.sampleRate, self.IOBufferDuration,
+ self.outputNumberOfChannels, self.inputNumberOfChannels,
+ self.outputLatency, self.inputLatency, self.outputVolume];
+ return description;
+}
+
+- (void)setIsActive:(BOOL)isActive {
+ @synchronized(self) {
+ _isActive = isActive;
+ }
+}
+
+- (BOOL)isActive {
+ @synchronized(self) {
+ return _isActive;
+ }
+}
+
+- (void)setUseManualAudio:(BOOL)useManualAudio {
+ @synchronized(self) {
+ if (_useManualAudio == useManualAudio) {
+ return;
+ }
+ _useManualAudio = useManualAudio;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)useManualAudio {
+ @synchronized(self) {
+ return _useManualAudio;
+ }
+}
+
+- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
+ @synchronized(self) {
+ if (_isAudioEnabled == isAudioEnabled) {
+ return;
+ }
+ _isAudioEnabled = isAudioEnabled;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)isAudioEnabled {
+ @synchronized(self) {
+ return _isAudioEnabled;
+ }
+}
+
+- (void)setIgnoresPreferredAttributeConfigurationErrors:
+ (BOOL)ignoresPreferredAttributeConfigurationErrors {
+ @synchronized(self) {
+ if (_ignoresPreferredAttributeConfigurationErrors ==
+ ignoresPreferredAttributeConfigurationErrors) {
+ return;
+ }
+ _ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
+ }
+}
+
+- (BOOL)ignoresPreferredAttributeConfigurationErrors {
+ @synchronized(self) {
+ return _ignoresPreferredAttributeConfigurationErrors;
+ }
+}
+
+// TODO(tkchin): Check for duplicates.
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+ RTCLog(@"Adding delegate: (%p)", delegate);
+ if (!delegate) {
+ return;
+ }
+ @synchronized(self) {
+ _delegates.push_back(delegate);
+ [self removeZeroedDelegates];
+ }
+}
+
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+ RTCLog(@"Removing delegate: (%p)", delegate);
+ if (!delegate) {
+ return;
+ }
+ @synchronized(self) {
+ _delegates.erase(std::remove(_delegates.begin(),
+ _delegates.end(),
+ delegate),
+ _delegates.end());
+ [self removeZeroedDelegates];
+ }
+}
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wthread-safety-analysis"
+
+- (void)lockForConfiguration {
+ RTC_CHECK(!mutex_locked);
+ _mutex.Lock();
+ mutex_locked = true;
+}
+
+- (void)unlockForConfiguration {
+ mutex_locked = false;
+ _mutex.Unlock();
+}
+
+#pragma clang diagnostic pop
+
+#pragma mark - AVAudioSession proxy methods
+
+// The read-only accessors below simply proxy the corresponding properties
+// of the underlying AVAudioSession (`self.session`). They keep no state of
+// their own, so no configuration lock is required to call them.
+- (NSString *)category {
+  return self.session.category;
+}
+
+- (AVAudioSessionCategoryOptions)categoryOptions {
+  return self.session.categoryOptions;
+}
+
+- (NSString *)mode {
+  return self.session.mode;
+}
+
+- (BOOL)secondaryAudioShouldBeSilencedHint {
+  return self.session.secondaryAudioShouldBeSilencedHint;
+}
+
+- (AVAudioSessionRouteDescription *)currentRoute {
+  return self.session.currentRoute;
+}
+
+- (NSInteger)maximumInputNumberOfChannels {
+  return self.session.maximumInputNumberOfChannels;
+}
+
+- (NSInteger)maximumOutputNumberOfChannels {
+  return self.session.maximumOutputNumberOfChannels;
+}
+
+- (float)inputGain {
+  return self.session.inputGain;
+}
+
+- (BOOL)inputGainSettable {
+  return self.session.inputGainSettable;
+}
+
+- (BOOL)inputAvailable {
+  return self.session.inputAvailable;
+}
+
+- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
+  return self.session.inputDataSources;
+}
+
+- (AVAudioSessionDataSourceDescription *)inputDataSource {
+  return self.session.inputDataSource;
+}
+
+- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
+  return self.session.outputDataSources;
+}
+
+- (AVAudioSessionDataSourceDescription *)outputDataSource {
+  return self.session.outputDataSource;
+}
+
+- (double)sampleRate {
+  return self.session.sampleRate;
+}
+
+- (double)preferredSampleRate {
+  return self.session.preferredSampleRate;
+}
+
+- (NSInteger)inputNumberOfChannels {
+  return self.session.inputNumberOfChannels;
+}
+
+- (NSInteger)outputNumberOfChannels {
+  return self.session.outputNumberOfChannels;
+}
+
+- (float)outputVolume {
+  return self.session.outputVolume;
+}
+
+- (NSTimeInterval)inputLatency {
+  return self.session.inputLatency;
+}
+
+- (NSTimeInterval)outputLatency {
+  return self.session.outputLatency;
+}
+
+- (NSTimeInterval)IOBufferDuration {
+  return self.session.IOBufferDuration;
+}
+
+- (NSTimeInterval)preferredIOBufferDuration {
+  return self.session.preferredIOBufferDuration;
+}
+
+// Activates or deactivates the shared AVAudioSession, balancing nested
+// requests via `_activationCount`. The caller must hold the configuration
+// lock (see `-lockForConfiguration`). Returns NO and populates `outError`
+// on failure.
+- (BOOL)setActive:(BOOL)active
+            error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  int activationCount = _activationCount.load();
+  if (!active && activationCount == 0) {
+    RTCLogWarning(@"Attempting to deactivate without prior activation.");
+  }
+  [self notifyWillSetActive:active];
+  BOOL success = YES;
+  BOOL isActive = self.isActive;
+  // Keep a local error so we can log it.
+  NSError *error = nil;
+  // Only touch the underlying session on the first activation, or on the
+  // deactivation that balances the last outstanding activation.
+  BOOL shouldSetActive =
+      (active && !isActive) || (!active && isActive && activationCount == 1);
+  // Attempt to activate if we're not active.
+  // Attempt to deactivate if we're active and it's the last unbalanced call.
+  if (shouldSetActive) {
+    AVAudioSession *session = self.session;
+    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
+    // that other audio sessions that were interrupted by our session can return
+    // to their active state. It is recommended for VoIP apps to use this
+    // option.
+    AVAudioSessionSetActiveOptions options =
+        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
+    success = [session setActive:active
+                     withOptions:options
+                           error:&error];
+    if (outError) {
+      *outError = error;
+    }
+  }
+  if (success) {
+    if (active) {
+      if (shouldSetActive) {
+        self.isActive = active;
+        // A successful activation implicitly ends any pending interruption.
+        if (self.isInterrupted) {
+          self.isInterrupted = NO;
+          [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+        }
+      }
+      [self incrementActivationCount];
+      [self notifyDidSetActive:active];
+    }
+  } else {
+    RTCLogError(@"Failed to setActive:%d. Error: %@",
+                active, error.localizedDescription);
+    [self notifyFailedToSetActive:active error:error];
+  }
+  // Set isActive and decrement activation count on deactivation
+  // whether or not it succeeded.
+  if (!active) {
+    self.isActive = active;
+    [self notifyDidSetActive:active];
+    [self decrementActivationCount];
+  }
+  RTCLog(@"Number of current activations: %d", _activationCount.load());
+  return success;
+}
+
+// The mutators below proxy the corresponding AVAudioSession setters. Each
+// one requires the caller to hold the configuration lock (see
+// `-lockForConfiguration`); otherwise it fails immediately with
+// kRTCAudioSessionErrorLockRequired via `-checkLock:`.
+- (BOOL)setCategory:(NSString *)category
+        withOptions:(AVAudioSessionCategoryOptions)options
+              error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setCategory:category withOptions:options error:outError];
+}
+
+- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setMode:mode error:outError];
+}
+
+- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setInputGain:gain error:outError];
+}
+
+- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setPreferredSampleRate:sampleRate error:outError];
+}
+
+- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
+                               error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setPreferredIOBufferDuration:duration error:outError];
+}
+
+- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
+                                    error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setPreferredInputNumberOfChannels:count error:outError];
+}
+
+- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
+                                     error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setPreferredOutputNumberOfChannels:count error:outError];
+}
+
+- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
+                          error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session overrideOutputAudioPort:portOverride error:outError];
+}
+
+- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
+                    error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setPreferredInput:inPort error:outError];
+}
+
+- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+                     error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setInputDataSource:dataSource error:outError];
+}
+
+- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
+                      error:(NSError **)outError {
+  if (![self checkLock:outError]) {
+    return NO;
+  }
+  return [self.session setOutputDataSource:dataSource error:outError];
+}
+
+#pragma mark - Notifications
+
+// AVAudioSessionInterruptionNotification handler. On Began, marks the
+// session inactive/interrupted and notifies delegates; on Ended, clears the
+// interrupted flag, re-syncs the session's active state, and forwards the
+// system's should-resume hint to delegates.
+- (void)handleInterruptionNotification:(NSNotification *)notification {
+  NSNumber* typeNumber =
+      notification.userInfo[AVAudioSessionInterruptionTypeKey];
+  AVAudioSessionInterruptionType type =
+      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
+  switch (type) {
+    case AVAudioSessionInterruptionTypeBegan:
+      RTCLog(@"Audio session interruption began.");
+      self.isActive = NO;
+      self.isInterrupted = YES;
+      [self notifyDidBeginInterruption];
+      break;
+    case AVAudioSessionInterruptionTypeEnded: {
+      RTCLog(@"Audio session interruption ended.");
+      self.isInterrupted = NO;
+      [self updateAudioSessionAfterEvent];
+      NSNumber *optionsNumber =
+          notification.userInfo[AVAudioSessionInterruptionOptionKey];
+      AVAudioSessionInterruptionOptions options =
+          optionsNumber.unsignedIntegerValue;
+      BOOL shouldResume =
+          options & AVAudioSessionInterruptionOptionShouldResume;
+      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
+      break;
+    }
+  }
+}
+
+// AVAudioSessionRouteChangeNotification handler. Logs the change reason and
+// the previous/current routes, then forwards both to delegates. The switch
+// exists only for logging; every reason is forwarded identically.
+- (void)handleRouteChangeNotification:(NSNotification *)notification {
+  // Get reason for current route change.
+  NSNumber* reasonNumber =
+      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
+  AVAudioSessionRouteChangeReason reason =
+      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
+  RTCLog(@"Audio route changed:");
+  switch (reason) {
+    case AVAudioSessionRouteChangeReasonUnknown:
+      RTCLog(@"Audio route changed: ReasonUnknown");
+      break;
+    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
+      RTCLog(@"Audio route changed: NewDeviceAvailable");
+      break;
+    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
+      RTCLog(@"Audio route changed: OldDeviceUnavailable");
+      break;
+    case AVAudioSessionRouteChangeReasonCategoryChange:
+      RTCLog(@"Audio route changed: CategoryChange to :%@",
+             self.session.category);
+      break;
+    case AVAudioSessionRouteChangeReasonOverride:
+      RTCLog(@"Audio route changed: Override");
+      break;
+    case AVAudioSessionRouteChangeReasonWakeFromSleep:
+      RTCLog(@"Audio route changed: WakeFromSleep");
+      break;
+    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
+      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
+      break;
+    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
+      RTCLog(@"Audio route changed: RouteConfigurationChange");
+      break;
+  }
+  AVAudioSessionRouteDescription* previousRoute =
+      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
+  // Log previous route configuration.
+  RTCLog(@"Previous route: %@\nCurrent route:%@",
+         previousRoute, self.session.currentRoute);
+  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
+}
+
+// AVAudioSessionMediaServicesWereLostNotification handler: re-syncs the
+// session's active state and informs delegates.
+- (void)handleMediaServicesWereLost:(NSNotification *)notification {
+  RTCLog(@"Media services were lost.");
+  [self updateAudioSessionAfterEvent];
+  [self notifyMediaServicesWereLost];
+}
+
+// AVAudioSessionMediaServicesWereResetNotification handler: re-syncs the
+// session's active state and informs delegates.
+- (void)handleMediaServicesWereReset:(NSNotification *)notification {
+  RTCLog(@"Media services were reset.");
+  [self updateAudioSessionAfterEvent];
+  [self notifyMediaServicesWereReset];
+}
+
+// AVAudioSessionSilenceSecondaryAudioHintNotification handler.
+// TODO(henrika): just adding logs here for now until we know if we ever see
+// this notification and might be affected by it, or if further actions are
+// required.
+- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
+  NSNumber *typeNumber =
+      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
+  AVAudioSessionSilenceSecondaryAudioHintType type =
+      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
+  switch (type) {
+    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
+      RTCLog(@"Another application's primary audio has started.");
+      break;
+    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
+      RTCLog(@"Another application's primary audio has stopped.");
+      break;
+  }
+}
+
+// UIApplicationDidBecomeActiveNotification handler. Clears any pending
+// interruption flag (the system does not always deliver a matching
+// interruption-end) and always notifies delegates of an interruption end
+// with shouldResumeSession = YES.
+- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
+  BOOL isInterrupted = self.isInterrupted;
+  RTCLog(@"Application became active after an interruption. Treating as interruption "
+          "end. isInterrupted changed from %d to 0.",
+         isInterrupted);
+  if (isInterrupted) {
+    self.isInterrupted = NO;
+    [self updateAudioSessionAfterEvent];
+  }
+  // Always treat application becoming active as an interruption end event.
+  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+}
+
+#pragma mark - Private
+
+// Builds the NSError returned when a configuration method is invoked
+// without first calling `-lockForConfiguration`.
++ (NSError *)lockError {
+  return [NSError errorWithDomain:kRTCAudioSessionErrorDomain
+                             code:kRTCAudioSessionErrorLockRequired
+                         userInfo:@{
+                           NSLocalizedDescriptionKey :
+                               @"Must call lockForConfiguration before calling this method."
+                         }];
+}
+
+// Returns a snapshot of the delegate list. The vector is returned by value
+// under the lock, so callers can iterate it without further synchronization.
+- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
+  @synchronized(self) {
+    // Note: this returns a copy.
+    return _delegates;
+  }
+}
+
+// TODO(tkchin): check for duplicates.
+// Inserts `delegate` at the front of the list so it is notified before
+// previously registered delegates.
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
+  @synchronized(self) {
+    _delegates.insert(_delegates.begin(), delegate);
+  }
+}
+
+// Compacts the delegate list by erasing entries whose weak references have
+// been zeroed out (erase + std::remove_if idiom).
+- (void)removeZeroedDelegates {
+  @synchronized(self) {
+    _delegates.erase(
+        std::remove_if(_delegates.begin(),
+                       _delegates.end(),
+                       [](id delegate) -> bool { return delegate == nil; }),
+        _delegates.end());
+  }
+}
+
+// Current activation balance: setActive:YES calls minus setActive:NO calls.
+- (int)activationCount {
+  return _activationCount.load();
+}
+
+// Atomically increments the activation count and returns the new value.
+- (int)incrementActivationCount {
+  RTCLog(@"Incrementing activation count.");
+  return _activationCount.fetch_add(1) + 1;
+}
+
+// Atomically decrements the activation count and returns the new value.
+// NOTE(review): returns NSInteger while the increment variant returns int;
+// consider unifying the return types.
+- (NSInteger)decrementActivationCount {
+  RTCLog(@"Decrementing activation count.");
+  return _activationCount.fetch_sub(1) - 1;
+}
+
+// Number of WebRTC sessions currently registered on this audio session.
+- (int)webRTCSessionCount {
+  return _webRTCSessionCount.load();
+}
+
+// Audio I/O is always allowed unless the client has taken manual control of
+// the session (`useManualAudio`) and has not (yet) enabled audio.
+- (BOOL)canPlayOrRecord {
+  if (!self.useManualAudio) {
+    return YES;
+  }
+  return self.isAudioEnabled;
+}
+
+// Thread-safe getter for the interruption flag; reads under the same lock
+// the setter uses.
+- (BOOL)isInterrupted {
+  BOOL interrupted;
+  @synchronized(self) {
+    interrupted = _isInterrupted;
+  }
+  return interrupted;
+}
+
+// Thread-safe setter for the interruption flag; no-op when unchanged.
+- (void)setIsInterrupted:(BOOL)isInterrupted {
+  @synchronized(self) {
+    if (_isInterrupted == isInterrupted) {
+      return;
+    }
+    _isInterrupted = isInterrupted;
+  }
+}
+
+// Verifies that the configuration lock is held. On failure, writes a
+// kRTCAudioSessionErrorLockRequired error to `outError` (if provided) and
+// returns NO.
+- (BOOL)checkLock:(NSError **)outError {
+  if (!mutex_locked) {
+    if (outError) {
+      *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
+    }
+    return NO;
+  }
+  return YES;
+}
+
+// Registers the start of a WebRTC session: bumps the session count and
+// notifies delegates. Always succeeds; `outError` is cleared if provided.
+- (BOOL)beginWebRTCSession:(NSError **)outError {
+  if (outError) {
+    *outError = nil;
+  }
+  _webRTCSessionCount.fetch_add(1);
+  [self notifyDidStartPlayOrRecord];
+  return YES;
+}
+
+// Registers the end of a WebRTC session: decrements the session count and
+// notifies delegates. Always succeeds; `outError` is cleared if provided.
+- (BOOL)endWebRTCSession:(NSError **)outError {
+  if (outError) {
+    *outError = nil;
+  }
+  _webRTCSessionCount.fetch_sub(1);
+  [self notifyDidStopPlayOrRecord];
+  return YES;
+}
+
+// Applies WebRTC's preferred AVAudioSession configuration and activates the
+// session. Falls back to the session's current sample rate when the
+// preferred rate cannot be set (common with BT headsets). Returns NO with
+// `outError` set on a hard failure; a sample-rate fallback failure is
+// reported via `outError` but still returns YES.
+- (BOOL)configureWebRTCSession:(NSError **)outError {
+  if (outError) {
+    *outError = nil;
+  }
+  RTCLog(@"Configuring audio session for WebRTC.");
+
+  // Configure the AVAudioSession and activate it.
+  // Provide an error even if there isn't one so we can log it.
+  NSError *error = nil;
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
+    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
+                error.localizedDescription);
+    // Do not call setActive:NO if setActive:YES failed.
+    if (outError) {
+      *outError = error;
+    }
+    return NO;
+  }
+
+  // Ensure that the device currently supports audio input.
+  // TODO(tkchin): Figure out if this is really necessary.
+  if (!self.inputAvailable) {
+    RTCLogError(@"No audio input path is available!");
+    [self unconfigureWebRTCSession:nil];
+    if (outError) {
+      *outError = [self configurationErrorWithDescription:@"No input path."];
+    }
+    return NO;
+  }
+
+  // It can happen (e.g. in combination with BT devices) that the attempt to set
+  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
+  // configuration attempt using the sample rate that worked using the active
+  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
+  // combination with BT headsets. Using this "trick" seems to avoid a state
+  // where Core Audio asks for a different number of audio frames than what the
+  // session's I/O buffer duration corresponds to.
+  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
+  // tested on a limited set of iOS devices and BT devices.
+  double sessionSampleRate = self.sampleRate;
+  double preferredSampleRate = webRTCConfig.sampleRate;
+  if (sessionSampleRate != preferredSampleRate) {
+    RTCLogWarning(
+        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
+        sessionSampleRate, preferredSampleRate);
+    if (![self setPreferredSampleRate:sessionSampleRate
+                                error:&error]) {
+      RTCLogError(@"Failed to set preferred sample rate: %@",
+                  error.localizedDescription);
+      if (outError) {
+        *outError = error;
+      }
+    }
+  }
+
+  return YES;
+}
+
+// Deactivates the audio session after WebRTC is done with it.
+// NOTE(review): the result of `setActive:NO` is ignored and YES is always
+// returned, although `outError` may still be populated by the call.
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
+  if (outError) {
+    *outError = nil;
+  }
+  RTCLog(@"Unconfiguring audio session for WebRTC.");
+  [self setActive:NO error:outError];
+
+  return YES;
+}
+
+// Builds a kRTCAudioSessionErrorConfiguration error carrying `description`
+// as its localized description.
+- (NSError *)configurationErrorWithDescription:(NSString *)description {
+  return [NSError errorWithDomain:kRTCAudioSessionErrorDomain
+                             code:kRTCAudioSessionErrorConfiguration
+                         userInfo:@{NSLocalizedDescriptionKey : description}];
+}
+
+// Re-synchronizes the AVAudioSession's active state with the activation
+// count after a system event (interruption end, media services lost/reset).
+// Notifies other apps on deactivation so interrupted sessions can resume.
+- (void)updateAudioSessionAfterEvent {
+  BOOL shouldActivate = self.activationCount > 0;
+  AVAudioSessionSetActiveOptions options = shouldActivate ?
+      0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
+  NSError *error = nil;
+  if ([self.session setActive:shouldActivate
+                  withOptions:options
+                        error:&error]) {
+    self.isActive = shouldActivate;
+  } else {
+    RTCLogError(@"Failed to set session active to %d. Error:%@",
+                shouldActivate, error.localizedDescription);
+  }
+}
+
+// Recomputes the canPlayOrRecord flag and notifies delegates when it
+// changes. The notification is dispatched outside the @synchronized block
+// to avoid calling out to delegates while holding the lock.
+- (void)updateCanPlayOrRecord {
+  BOOL canPlayOrRecord = NO;
+  BOOL shouldNotify = NO;
+  @synchronized(self) {
+    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
+    if (_canPlayOrRecord == canPlayOrRecord) {
+      return;
+    }
+    _canPlayOrRecord = canPlayOrRecord;
+    shouldNotify = YES;
+  }
+  if (shouldNotify) {
+    [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
+  }
+}
+
+// Called when the audio session is activated externally (e.g. by CallKit).
+// Balances the activation count and clears any stale interruption state.
+- (void)audioSessionDidActivate:(AVAudioSession *)session {
+  if (_session != session) {
+    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
+  }
+  RTCLog(@"Audio session was externally activated.");
+  [self incrementActivationCount];
+  self.isActive = YES;
+  // When a CallKit call begins, it's possible that we receive an interruption
+  // begin without a corresponding end. Since we know that we have an activated
+  // audio session at this point, just clear any saved interruption flag since
+  // the app may never be foregrounded during the duration of the call.
+  if (self.isInterrupted) {
+    RTCLog(@"Clearing interrupted state due to external activation.");
+    self.isInterrupted = NO;
+  }
+  // Treat external audio session activation as an end interruption event.
+  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
+}
+
+// Called when the audio session is deactivated externally; balances the
+// activation count.
+- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
+  if (_session != session) {
+    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
+  }
+  RTCLog(@"Audio session was externally deactivated.");
+  self.isActive = NO;
+  [self decrementActivationCount];
+}
+
+// KVO callback. Observations registered with this class object as context
+// (output volume changes on `_session`) are handled here; everything else
+// is forwarded to super, per KVO convention.
+- (void)observeValueForKeyPath:(NSString *)keyPath
+                      ofObject:(id)object
+                        change:(NSDictionary *)change
+                       context:(void *)context {
+  if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
+    if (object == _session) {
+      NSNumber *newVolume = change[NSKeyValueChangeNewKey];
+      RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
+      [self notifyDidChangeOutputVolume:newVolume.floatValue];
+    }
+  } else {
+    [super observeValueForKeyPath:keyPath
+                         ofObject:object
+                           change:change
+                          context:context];
+  }
+}
+
+// The notify* helpers below fan an event out to every registered delegate.
+// Each one iterates a snapshot copy of the delegate list (`self.delegates`)
+// and checks `respondsToSelector:` first, since all delegate methods are
+// optional.
+- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:audioUnitStartFailedWithError:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self
+          audioUnitStartFailedWithError:[NSError errorWithDomain:kRTCAudioSessionErrorDomain
+                                                            code:error
+                                                        userInfo:nil]];
+    }
+  }
+}
+
+- (void)notifyDidBeginInterruption {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionDidBeginInterruption:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionDidBeginInterruption:self];
+    }
+  }
+}
+
+- (void)notifyDidEndInterruptionWithShouldResumeSession:
+    (BOOL)shouldResumeSession {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionDidEndInterruption:self
+                           shouldResumeSession:shouldResumeSession];
+    }
+  }
+}
+
+- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
+                         previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionDidChangeRoute:self
+                                    reason:reason
+                             previousRoute:previousRoute];
+    }
+  }
+}
+
+- (void)notifyMediaServicesWereLost {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionMediaServerTerminated:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionMediaServerTerminated:self];
+    }
+  }
+}
+
+- (void)notifyMediaServicesWereReset {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionMediaServerReset:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionMediaServerReset:self];
+    }
+  }
+}
+
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
+    }
+  }
+}
+
+- (void)notifyDidStartPlayOrRecord {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionDidStartPlayOrRecord:self];
+    }
+  }
+}
+
+- (void)notifyDidStopPlayOrRecord {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSessionDidStopPlayOrRecord:self];
+    }
+  }
+}
+
+- (void)notifyDidChangeOutputVolume:(float)volume {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:didChangeOutputVolume:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self didChangeOutputVolume:volume];
+    }
+  }
+}
+
+- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
+    }
+  }
+}
+
+// Informs delegates that the session's active state is about to change.
+// Iterates a snapshot copy of the delegate list and checks
+// `respondsToSelector:` first, since all delegate methods are optional.
+// Consistency: use `auto` for the loop variable to match the other notify*
+// helpers in this file.
+- (void)notifyWillSetActive:(BOOL)active {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:willSetActive:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self willSetActive:active];
+    }
+  }
+}
+
+// Informs delegates that the session's active state has changed.
+- (void)notifyDidSetActive:(BOOL)active {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:didSetActive:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self didSetActive:active];
+    }
+  }
+}
+
+// Informs delegates that changing the session's active state failed.
+- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
+  for (auto delegate : self.delegates) {
+    SEL sel = @selector(audioSession:failedToSetActive:error:);
+    if ([delegate respondsToSelector:sel]) {
+      [delegate audioSession:self failedToSetActive:active error:error];
+    }
+  }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
new file mode 100644
index 0000000000..4582b80557
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Defaults used to build the WebRTC audio session configuration; the
+// concrete values (and the rationale for them) are defined in
+// RTCAudioSessionConfiguration.m.
+RTC_EXTERN const int kRTCAudioSessionPreferredNumberOfChannels;
+RTC_EXTERN const double kRTCAudioSessionHighPerformanceSampleRate;
+RTC_EXTERN const double kRTCAudioSessionLowComplexitySampleRate;
+RTC_EXTERN const double kRTCAudioSessionHighPerformanceIOBufferDuration;
+RTC_EXTERN const double kRTCAudioSessionLowComplexityIOBufferDuration;
+
+// Struct to hold configuration values.
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject
+
+// Mirrors of the corresponding AVAudioSession properties.
+// NOTE(review): the NSString properties are declared `strong`; `copy` would
+// be the conventional choice — confirm before changing, since the setters'
+// semantics would change.
+@property(nonatomic, strong) NSString *category;
+@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions;
+@property(nonatomic, strong) NSString *mode;
+@property(nonatomic, assign) double sampleRate;
+@property(nonatomic, assign) NSTimeInterval ioBufferDuration;
+@property(nonatomic, assign) NSInteger inputNumberOfChannels;
+@property(nonatomic, assign) NSInteger outputNumberOfChannels;
+
+/** Initializes configuration to defaults. */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+/** Returns the current configuration of the audio session. */
++ (instancetype)currentConfiguration;
+/** Returns the configuration that WebRTC needs. */
++ (instancetype)webRTCConfiguration;
+/** Provide a way to override the default configuration. */
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
new file mode 100644
index 0000000000..39e9ac13ec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSessionConfiguration.h"
+#import "RTCAudioSession.h"
+
+#import "helpers/RTCDispatcher.h"
+#import "helpers/UIDevice+RTCDevice.h"
+
+// Try to use mono to save resources. Also avoids channel format conversion
+// in the I/O audio unit. Initial tests have shown that it is possible to use
+// mono natively for built-in microphones and for BT headsets but not for
+// wired headsets. Wired headsets only support stereo as native channel format
+// but it is a low cost operation to do a format conversion to mono in the
+// audio unit. Hence, we will not hit a RTC_CHECK in
+// VerifyAudioParametersForActiveAudioSession() for a mismatch between the
+// preferred number of channels and the actual number of channels.
+const int kRTCAudioSessionPreferredNumberOfChannels = 1;
+
+// Preferred hardware sample rate (unit is in Hertz). The client sample rate
+// will be set to this value as well to avoid resampling in the audio unit's
+// format converter. Note that some devices, e.g. BT headsets, only support
+// 8000Hz as native sample rate.
+const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
+
+// A lower sample rate will be used for devices with only one core
+// (e.g. iPhone 4). The goal is to reduce the CPU load of the application.
+const double kRTCAudioSessionLowComplexitySampleRate = 16000.0;
+
+// Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms
+// size used by WebRTC. The exact actual size will differ between devices.
+// Example: using 48kHz on iPhone 6 results in a native buffer size of
+// ~10.6667ms or 512 audio frames per buffer. The FineAudioBuffer instance will
+// take care of any buffering required to convert between native buffers and
+// buffers used by WebRTC. It is beneficial for the performance if the native
+// size is as an even multiple of 10ms as possible since it results in "clean"
+// callback sequence without bursts of callbacks back to back.
+const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
+
+// Use a larger buffer size on devices with only one core (e.g. iPhone 4).
+// It will result in a lower CPU consumption at the cost of a larger latency.
+// The size of 60ms is based on instrumentation that shows a significant
+// reduction in CPU load compared with 10ms on low-end devices.
+// TODO(henrika): monitor this size and determine if it should be modified.
+const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
+
+// Process-wide configuration returned by +webRTCConfiguration; initialized
+// lazily in +initialize and replaceable via +setWebRTCConfiguration:.
+static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
+
+@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration)
+
+@synthesize category = _category;
+@synthesize categoryOptions = _categoryOptions;
+@synthesize mode = _mode;
+@synthesize sampleRate = _sampleRate;
+@synthesize ioBufferDuration = _ioBufferDuration;
+@synthesize inputNumberOfChannels = _inputNumberOfChannels;
+@synthesize outputNumberOfChannels = _outputNumberOfChannels;
+
+// Initializes the configuration with WebRTC's default audio settings,
+// adapting the sample rate and I/O buffer duration to the device's
+// capabilities.
+- (instancetype)init {
+  if (self = [super init]) {
+    // Use a category which supports simultaneous recording and playback.
+    // By default, using this category implies that our app's audio is
+    // nonmixable, hence activating the session will interrupt any other
+    // audio sessions which are also nonmixable.
+    _category = AVAudioSessionCategoryPlayAndRecord;
+    _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth;
+
+    // Specify mode for two-way voice communication (e.g. VoIP).
+    _mode = AVAudioSessionModeVoiceChat;
+
+    // Set the session's sample rate or the hardware sample rate.
+    // It is essential that we use the same sample rate as stream format
+    // to ensure that the I/O unit does not have to do sample rate conversion.
+    // Set the preferred audio I/O buffer duration, in seconds.
+    NSUInteger processorCount = [NSProcessInfo processInfo].processorCount;
+    // Use best sample rate and buffer duration if the CPU has more than one
+    // core.
+    if (processorCount > 1 && [UIDevice deviceType] != RTCDeviceTypeIPhone4S) {
+      _sampleRate = kRTCAudioSessionHighPerformanceSampleRate;
+      _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration;
+    } else {
+      _sampleRate = kRTCAudioSessionLowComplexitySampleRate;
+      _ioBufferDuration = kRTCAudioSessionLowComplexityIOBufferDuration;
+    }
+
+    // We try to use mono in both directions to save resources and format
+    // conversions in the audio unit. Some devices only support stereo;
+    // e.g. wired headset on iPhone 6.
+    // TODO(henrika): add support for stereo if needed.
+    _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
+    _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
+  }
+  return self;
+}
+
++ (void)initialize {
+  // +initialize runs once for this class and once for every subclass that
+  // does not override it; guard on the exact class so a subclass's implicit
+  // +initialize cannot reset the shared default configuration.
+  if (self == [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) class]) {
+    gWebRTCConfiguration = [[self alloc] init];
+  }
+}
+
+// Snapshots the live AVAudioSession state (via RTCAudioSession) into a new
+// configuration object.
++ (instancetype)currentConfiguration {
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
+      [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
+  config.category = session.category;
+  config.categoryOptions = session.categoryOptions;
+  config.mode = session.mode;
+  config.sampleRate = session.sampleRate;
+  config.ioBufferDuration = session.IOBufferDuration;
+  config.inputNumberOfChannels = session.inputNumberOfChannels;
+  config.outputNumberOfChannels = session.outputNumberOfChannels;
+  return config;
+}
+
+// Returns the process-wide configuration WebRTC applies; access is
+// synchronized on the class object, matching the setter below.
++ (instancetype)webRTCConfiguration {
+  @synchronized(self) {
+    return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration;
+  }
+}
+
+// Replaces the process-wide configuration returned by +webRTCConfiguration.
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
+  @synchronized(self) {
+    gWebRTCConfiguration = configuration;
+  }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
new file mode 100644
index 0000000000..6a75f01479
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCAudioSession.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+namespace webrtc {
+class AudioSessionObserver;
+}
+
+/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
+ * methods on the AudioSessionObserver.
+ */
+@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** `observer` is a raw pointer and should be kept alive
+ * for this object's lifetime.
+ */
+- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
new file mode 100644
index 0000000000..daddf314a4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNativeAudioSessionDelegateAdapter.h"
+
+#include "sdk/objc/native/src/audio/audio_session_observer.h"
+
+#import "base/RTCLogging.h"
+
+@implementation RTCNativeAudioSessionDelegateAdapter {
+ webrtc::AudioSessionObserver *_observer;
+}
+
+- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
+ RTC_DCHECK(observer);
+ if (self = [super init]) {
+ _observer = observer;
+ }
+ return self;
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
+
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+ _observer->OnInterruptionBegin();
+}
+
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession {
+ _observer->OnInterruptionEnd();
+}
+
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+ switch (reason) {
+ case AVAudioSessionRouteChangeReasonUnknown:
+ case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
+ case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
+ case AVAudioSessionRouteChangeReasonCategoryChange:
+ // It turns out that we see a category change (at least in iOS 9.2)
+ // when making a switch from a BT device to e.g. Speaker using the
+ // iOS Control Center and that we therefore must check if the sample
+ // rate has changed. If that is the case, restart the audio unit.
+ case AVAudioSessionRouteChangeReasonOverride:
+ case AVAudioSessionRouteChangeReasonWakeFromSleep:
+ case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
+ _observer->OnValidRouteChange();
+ break;
+ case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
+ // The set of input and output ports has not changed, but their
+ // configuration has, e.g., a port’s selected data source has
+ // changed. Ignore this type of route change since we are focusing
+ // on detecting headset changes.
+ RTCLog(@"Ignoring RouteConfigurationChange");
+ break;
+ }
+}
+
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+ _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
+}
+
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume {
+ _observer->OnChangedOutputVolume();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
new file mode 100644
index 0000000000..370bfa70f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+// Camera capture that implements RTCVideoCapturer. Delivers frames to a
+// RTCVideoCapturerDelegate (usually RTCVideoSource).
+NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
+
+// Capture session that is used for capturing. Valid from initialization to dealloc.
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+// Returns list of available capture devices that support video capture.
++ (NSArray<AVCaptureDevice *> *)captureDevices;
+// Returns list of formats that are supported by this class for this device.
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
+
+// Returns the most efficient supported output pixel format for this capturer.
+- (FourCharCode)preferredOutputPixelFormat;
+
+// Starts the capture session asynchronously and notifies callback on completion.
+// The device will capture video in the format given in the `format` parameter. If the pixel format
+// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
+// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps
+ completionHandler:(nullable void (^)(NSError *_Nullable))completionHandler;
+// Stops the capture session asynchronously and notifies callback on completion.
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
+
+// Starts the capture session asynchronously.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps;
+// Stops the capture session asynchronously.
+- (void)stopCapture;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
new file mode 100644
index 0000000000..98d3cf9f45
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
@@ -0,0 +1,535 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCameraVideoCapturer.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#if TARGET_OS_IPHONE
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+
+#import "helpers/AVCaptureSession+DevicePosition.h"
+#import "helpers/RTCDispatcher+Private.h"
+#include "rtc_base/system/gcd_helpers.h"
+
+const int64_t kNanosecondsPerSecond = 1000000000;
+
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
+()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic,
+ readonly) dispatch_queue_t frameQueue;
+@property(nonatomic, strong) AVCaptureDevice *currentDevice;
+@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
+@property(nonatomic, assign) BOOL isRunning;
+// Will the session be running once all asynchronous operations have been completed?
+@property(nonatomic, assign) BOOL willBeRunning;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) {
+ AVCaptureVideoDataOutput *_videoDataOutput;
+ AVCaptureSession *_captureSession;
+ FourCharCode _preferredOutputPixelFormat;
+ FourCharCode _outputPixelFormat;
+ RTCVideoRotation _rotation;
+#if TARGET_OS_IPHONE
+ UIDeviceOrientation _orientation;
+ BOOL _generatingOrientationNotifications;
+#endif
+}
+
+@synthesize frameQueue = _frameQueue;
+@synthesize captureSession = _captureSession;
+@synthesize currentDevice = _currentDevice;
+@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
+@synthesize isRunning = _isRunning;
+@synthesize willBeRunning = _willBeRunning;
+
+- (instancetype)init {
+ return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
+}
+
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+ return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
+}
+
+// This initializer is used for testing.
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
+ captureSession:(AVCaptureSession *)captureSession {
+ if (self = [super initWithDelegate:delegate]) {
+ // Create the capture session and all relevant inputs and outputs. We need
+ // to do this in init because the application may want the capture session
+ // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
+ // created here are retained until dealloc and never recreated.
+ if (![self setupCaptureSession:captureSession]) {
+ return nil;
+ }
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
+ _orientation = UIDeviceOrientationPortrait;
+ _rotation = RTCVideoRotation_90;
+ [center addObserver:self
+ selector:@selector(deviceOrientationDidChange:)
+ name:UIDeviceOrientationDidChangeNotification
+ object:nil];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionInterruption:)
+ name:AVCaptureSessionWasInterruptedNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionInterruptionEnded:)
+ name:AVCaptureSessionInterruptionEndedNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleApplicationDidBecomeActive:)
+ name:UIApplicationDidBecomeActiveNotification
+ object:[UIApplication sharedApplication]];
+#endif
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionRuntimeError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionDidStartRunning:)
+ name:AVCaptureSessionDidStartRunningNotification
+ object:_captureSession];
+ [center addObserver:self
+ selector:@selector(handleCaptureSessionDidStopRunning:)
+ name:AVCaptureSessionDidStopRunningNotification
+ object:_captureSession];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ NSAssert(!_willBeRunning,
+ @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
+ @"call stopCapture?");
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
++ (NSArray<AVCaptureDevice *> *)captureDevices {
+#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \
+ __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
+ AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
+ discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified];
+ return session.devices;
+#else
+ return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+#endif
+}
+
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
+ // Support opening the device in any format. We make sure it's converted to a format we
+ // can handle, if needed, in the method `-setupVideoDataOutput`.
+ return device.formats;
+}
+
+- (FourCharCode)preferredOutputPixelFormat {
+ return _preferredOutputPixelFormat;
+}
+
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps {
+ [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
+}
+
+- (void)stopCapture {
+ [self stopCaptureWithCompletionHandler:nil];
+}
+
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+ format:(AVCaptureDeviceFormat *)format
+ fps:(NSInteger)fps
+ completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler {
+ _willBeRunning = YES;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
+
+#if TARGET_OS_IPHONE
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (!self->_generatingOrientationNotifications) {
+ [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+ self->_generatingOrientationNotifications = YES;
+ }
+ });
+#endif
+
+ self.currentDevice = device;
+
+ NSError *error = nil;
+ if (![self.currentDevice lockForConfiguration:&error]) {
+ RTCLogError(@"Failed to lock device %@. Error: %@",
+ self.currentDevice,
+ error.userInfo);
+ if (completionHandler) {
+ completionHandler(error);
+ }
+ self.willBeRunning = NO;
+ return;
+ }
+ [self reconfigureCaptureSessionInput];
+ [self updateOrientation];
+ [self updateDeviceCaptureFormat:format fps:fps];
+ [self updateVideoDataOutputPixelFormat:format];
+ [self.captureSession startRunning];
+ [self.currentDevice unlockForConfiguration];
+ self.isRunning = YES;
+ if (completionHandler) {
+ completionHandler(nil);
+ }
+ }];
+}
+
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
+ _willBeRunning = NO;
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLogInfo("Stop");
+ self.currentDevice = nil;
+ for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
+ [self.captureSession removeInput:oldInput];
+ }
+ [self.captureSession stopRunning];
+
+#if TARGET_OS_IPHONE
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (self->_generatingOrientationNotifications) {
+ [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+ self->_generatingOrientationNotifications = NO;
+ }
+ });
+#endif
+ self.isRunning = NO;
+ if (completionHandler) {
+ completionHandler();
+ }
+ }];
+}
+
+#pragma mark iOS notifications
+
+#if TARGET_OS_IPHONE
+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ [self updateOrientation];
+ }];
+}
+#endif
+
+#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+ NSParameterAssert(captureOutput == _videoDataOutput);
+
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ return;
+ }
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (pixelBuffer == nil) {
+ return;
+ }
+
+#if TARGET_OS_IPHONE
+ // Default to portrait orientation on iPhone.
+ BOOL usingFrontCamera = NO;
+ // Check the image's EXIF for the camera the image came from as the image could have been
+ // delayed as we set alwaysDiscardsLateVideoFrames to NO.
+ AVCaptureDevicePosition cameraPosition =
+ [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
+ if (cameraPosition != AVCaptureDevicePositionUnspecified) {
+ usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
+ } else {
+ AVCaptureDeviceInput *deviceInput =
+ (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
+ usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
+ }
+ switch (_orientation) {
+ case UIDeviceOrientationPortrait:
+ _rotation = RTCVideoRotation_90;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ _rotation = RTCVideoRotation_270;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
+ break;
+ case UIDeviceOrientationLandscapeRight:
+ _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
+ break;
+ case UIDeviceOrientationFaceUp:
+ case UIDeviceOrientationFaceDown:
+ case UIDeviceOrientationUnknown:
+ // Ignore.
+ break;
+ }
+#else
+ // No rotation on Mac.
+ _rotation = RTCVideoRotation_0;
+#endif
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
+ kNanosecondsPerSecond;
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:_rotation
+ timeStampNs:timeStampNs];
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+#if TARGET_OS_IPHONE
+ CFStringRef droppedReason =
+ CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
+#else
+ // DroppedFrameReason unavailable on macOS.
+ CFStringRef droppedReason = nil;
+#endif
+ RTCLogError(@"Dropped sample buffer. Reason: %@", (__bridge NSString *)droppedReason);
+}
+
+#pragma mark - AVCaptureSession notifications
+
+- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
+ NSString *reasonString = nil;
+#if TARGET_OS_IPHONE
+ NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
+ if (reason) {
+ switch (reason.intValue) {
+ case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
+ reasonString = @"VideoDeviceNotAvailableInBackground";
+ break;
+ case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
+ reasonString = @"AudioDeviceInUseByAnotherClient";
+ break;
+ case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
+ reasonString = @"VideoDeviceInUseByAnotherClient";
+ break;
+ case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
+ reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
+ break;
+ }
+ }
+#endif
+ RTCLog(@"Capture session interrupted: %@", reasonString);
+}
+
+- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
+ RTCLog(@"Capture session interruption ended.");
+}
+
+- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
+ NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
+ RTCLogError(@"Capture session runtime error: %@", error);
+
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+#if TARGET_OS_IPHONE
+ if (error.code == AVErrorMediaServicesWereReset) {
+ [self handleNonFatalError];
+ } else {
+ [self handleFatalError];
+ }
+#else
+ [self handleFatalError];
+#endif
+ }];
+}
+
+- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
+ RTCLog(@"Capture session started.");
+
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ // If we successfully restarted after an unknown
+ // error, allow future retries on fatal errors.
+ self.hasRetriedOnFatalError = NO;
+ }];
+}
+
+- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
+ RTCLog(@"Capture session stopped.");
+}
+
+- (void)handleFatalError {
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ if (!self.hasRetriedOnFatalError) {
+ RTCLogWarning(@"Attempting to recover from fatal capture error.");
+ [self handleNonFatalError];
+ self.hasRetriedOnFatalError = YES;
+ } else {
+ RTCLogError(@"Previous fatal error recovery failed.");
+ }
+ }];
+}
+
+- (void)handleNonFatalError {
+ [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ RTCLog(@"Restarting capture session after error.");
+ if (self.isRunning) {
+ [self.captureSession startRunning];
+ }
+ }];
+}
+
+#if TARGET_OS_IPHONE
+
+#pragma mark - UIApplication notifications
+
+- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
+ [RTC_OBJC_TYPE(RTCDispatcher)
+ dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+ block:^{
+ if (self.isRunning && !self.captureSession.isRunning) {
+ RTCLog(@"Restarting capture session on active.");
+ [self.captureSession startRunning];
+ }
+ }];
+}
+
+#endif // TARGET_OS_IPHONE
+
+#pragma mark - Private
+
+- (dispatch_queue_t)frameQueue {
+ if (!_frameQueue) {
+ _frameQueue = RTCDispatchQueueCreateWithTarget(
+ "org.webrtc.cameravideocapturer.video",
+ DISPATCH_QUEUE_SERIAL,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
+ }
+ return _frameQueue;
+}
+
+- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
+ NSAssert(_captureSession == nil, @"Setup capture session called twice.");
+ _captureSession = captureSession;
+#if defined(WEBRTC_IOS)
+ _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
+ _captureSession.usesApplicationAudioSession = NO;
+#endif
+ [self setupVideoDataOutput];
+ // Add the output.
+ if (![_captureSession canAddOutput:_videoDataOutput]) {
+ RTCLogError(@"Video data output unsupported.");
+ return NO;
+ }
+ [_captureSession addOutput:_videoDataOutput];
+
+ return YES;
+}
+
+- (void)setupVideoDataOutput {
+ NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
+ AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+ // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
+ // device with the most efficient output format first. Find the first format that we support.
+ NSSet<NSNumber *> *supportedPixelFormats =
+ [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
+ NSMutableOrderedSet *availablePixelFormats =
+ [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
+ [availablePixelFormats intersectSet:supportedPixelFormats];
+ NSNumber *pixelFormat = availablePixelFormats.firstObject;
+ NSAssert(pixelFormat, @"Output device has no supported formats.");
+
+ _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
+ _outputPixelFormat = _preferredOutputPixelFormat;
+ videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
+ videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+ [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
+ _videoDataOutput = videoDataOutput;
+}
+
+- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
+ FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+ if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
+ mediaSubType = _preferredOutputPixelFormat;
+ }
+
+ if (mediaSubType != _outputPixelFormat) {
+ _outputPixelFormat = mediaSubType;
+ }
+
+ // Update videoSettings with dimensions, as some virtual cameras, e.g. Snap Camera, may not work
+ // otherwise.
+ CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ _videoDataOutput.videoSettings = @{
+ (id)kCVPixelBufferWidthKey : @(dimensions.width),
+ (id)kCVPixelBufferHeightKey : @(dimensions.height),
+ (id)kCVPixelBufferPixelFormatTypeKey : @(_outputPixelFormat),
+ };
+}
+
+#pragma mark - Private, called inside capture queue
+
+- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"updateDeviceCaptureFormat must be called on the capture queue.");
+ @try {
+ _currentDevice.activeFormat = format;
+ _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
+ } @catch (NSException *exception) {
+ RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
+ return;
+ }
+}
+
+- (void)reconfigureCaptureSessionInput {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"reconfigureCaptureSessionInput must be called on the capture queue.");
+ NSError *error = nil;
+ AVCaptureDeviceInput *input =
+ [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
+ if (!input) {
+ RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
+ return;
+ }
+ [_captureSession beginConfiguration];
+ for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
+ [_captureSession removeInput:oldInput];
+ }
+ if ([_captureSession canAddInput:input]) {
+ [_captureSession addInput:input];
+ } else {
+ RTCLogError(@"Cannot add camera as an input to the session.");
+ }
+ [_captureSession commitConfiguration];
+}
+
+- (void)updateOrientation {
+ NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+ @"updateOrientation must be called on the capture queue.");
+#if TARGET_OS_IPHONE
+ _orientation = [UIDevice currentDevice].orientation;
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h
new file mode 100644
index 0000000000..19262c64cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCVideoCapturer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Error passing block.
+ */
+typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error);
+
+/**
+ * Captures buffers from bundled video file.
+ *
+ * See @c RTCVideoCapturer for more info on capturers.
+ */
+RTC_OBJC_EXPORT
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
+
+/**
+ * Starts asynchronous capture of frames from video file.
+ *
+ * Capturing is not started if an error occurs. The underlying error will be
+ * relayed in the errorBlock if one is provided.
+ * Successfully captured video frames will be passed to the delegate.
+ *
+ * @param nameOfFile The name of the bundled video file to be read.
+ * @param errorBlock Block to be executed upon error.
+ */
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
+ onError:(__nullable RTCFileVideoCapturerErrorBlock)errorBlock;
+
+/**
+ * Immediately stops capture.
+ */
+- (void)stopCapture;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
new file mode 100644
index 0000000000..bcf1506259
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCFileVideoCapturer.m
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileVideoCapturer.h"
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#include "rtc_base/system/gcd_helpers.h"
+
+NSString *const kRTCFileVideoCapturerErrorDomain =
+ @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
+ RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
+ RTCFileVideoCapturerErrorCode_FileNotFound
+};
+
+typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
+ RTCFileVideoCapturerStatusNotInitialized,
+ RTCFileVideoCapturerStatusStarted,
+ RTCFileVideoCapturerStatusStopped
+};
+
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer)
+() @property(nonatomic, assign) CMTime lastPresentationTime;
+@property(nonatomic, strong) NSURL *fileURL;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
+ AVAssetReader *_reader;
+ AVAssetReaderTrackOutput *_outTrack;
+ RTCFileVideoCapturerStatus _status;
+ dispatch_queue_t _frameQueue;
+}
+
+@synthesize lastPresentationTime = _lastPresentationTime;
+@synthesize fileURL = _fileURL;
+
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
+ onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+ if (_status == RTCFileVideoCapturerStatusStarted) {
+ NSError *error =
+ [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+ code:RTCFileVideoCapturerErrorCode_CapturerRunning
+ userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}];
+
+ errorBlock(error);
+ return;
+ } else {
+ _status = RTCFileVideoCapturerStatusStarted;
+ }
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ NSString *pathForFile = [self pathForFileName:nameOfFile];
+ if (!pathForFile) {
+ NSString *errorString =
+ [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
+ NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+ code:RTCFileVideoCapturerErrorCode_FileNotFound
+ userInfo:@{NSUnderlyingErrorKey : errorString}];
+ errorBlock(error);
+ return;
+ }
+
+ self.lastPresentationTime = CMTimeMake(0, 0);
+
+ self.fileURL = [NSURL fileURLWithPath:pathForFile];
+ [self setupReaderOnError:errorBlock];
+ });
+}
+
+- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
+ AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
+
+ NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
+ NSError *error = nil;
+
+ _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
+ if (error) {
+ errorBlock(error);
+ return;
+ }
+
+ NSDictionary *options = @{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+ };
+ _outTrack =
+ [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
+ [_reader addOutput:_outTrack];
+
+ [_reader startReading];
+ RTCLog(@"File capturer started reading");
+ [self readNextBuffer];
+}
+- (void)stopCapture {
+ _status = RTCFileVideoCapturerStatusStopped;
+ RTCLog(@"File capturer stopped.");
+}
+
+#pragma mark - Private
+
+- (nullable NSString *)pathForFileName:(NSString *)fileName {
+ NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
+ if (nameComponents.count != 2) {
+ return nil;
+ }
+
+ NSString *path =
+ [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
+ return path;
+}
+
+- (dispatch_queue_t)frameQueue {
+ if (!_frameQueue) {
+ _frameQueue = RTCDispatchQueueCreateWithTarget(
+ "org.webrtc.filecapturer.video",
+ DISPATCH_QUEUE_SERIAL,
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
+ }
+ return _frameQueue;
+}
+
+- (void)readNextBuffer {
+ if (_status == RTCFileVideoCapturerStatusStopped) {
+ [_reader cancelReading];
+ _reader = nil;
+ return;
+ }
+
+ if (_reader.status == AVAssetReaderStatusCompleted) {
+ [_reader cancelReading];
+ _reader = nil;
+ [self setupReaderOnError:nil];
+ return;
+ }
+
+ CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
+ if (!sampleBuffer) {
+ [self readNextBuffer];
+ return;
+ }
+ if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+ !CMSampleBufferDataIsReady(sampleBuffer)) {
+ CFRelease(sampleBuffer);
+ [self readNextBuffer];
+ return;
+ }
+
+ [self publishSampleBuffer:sampleBuffer];
+}
+
+- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ Float64 presentationDifference =
+ CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
+ _lastPresentationTime = presentationTime;
+ int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
+
+ __block dispatch_source_t timer = [self createStrictTimer];
+ // Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
+ dispatch_source_set_timer(timer,
+ dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
+ DISPATCH_TIME_FOREVER,
+ 0);
+ dispatch_source_set_event_handler(timer, ^{
+ dispatch_source_cancel(timer);
+ timer = nil;
+
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (!pixelBuffer) {
+ CFRelease(sampleBuffer);
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+ NSTimeInterval timeStampSeconds = CACurrentMediaTime();
+ int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+ rotation:0
+ timeStampNs:timeStampNs];
+ CFRelease(sampleBuffer);
+
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ [self readNextBuffer];
+ });
+
+ [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+ });
+ dispatch_activate(timer);
+}
+
+- (dispatch_source_t)createStrictTimer {
+ dispatch_source_t timer = dispatch_source_create(
+ DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
+ return timer;
+}
+
+- (void)dealloc {
+ [self stopCapture];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h
new file mode 100644
index 0000000000..b5c786be18
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor+Private.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNetworkMonitor.h"
+
+#include "sdk/objc/native/src/network_monitor_observer.h"
+
+/** Private API: creation with a C++ observer, and explicit teardown. */
+@interface RTCNetworkMonitor ()
+
+/** `observer` is a raw pointer and should be kept alive
+ * for this object's lifetime.
+ */
+- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer
+    NS_DESIGNATED_INITIALIZER;
+
+/** Stops the receiver from posting updates to `observer`. */
+- (void)stop;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h
new file mode 100644
index 0000000000..21d22f5463
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Listens for NWPathMonitor updates and forwards the results to a C++
+ * observer.
+ */
+@interface RTCNetworkMonitor : NSObject
+
+/** Unavailable: use the designated initializer declared in
+ *  RTCNetworkMonitor+Private.h instead.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm
new file mode 100644
index 0000000000..7e75b2b4c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/network/RTCNetworkMonitor.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNetworkMonitor+Private.h"
+
+#import <Network/Network.h>
+
+#import "base/RTCLogging.h"
+#import "helpers/RTCDispatcher+Private.h"
+
+#include "rtc_base/string_utils.h"
+
+namespace {
+
+// Maps an NWPathMonitor interface type onto the corresponding WebRTC adapter
+// type; anything unrecognized is reported as unknown.
+rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType) {
+  switch (interfaceType) {
+    case nw_interface_type_wifi:
+      return rtc::ADAPTER_TYPE_WIFI;
+    case nw_interface_type_cellular:
+      return rtc::ADAPTER_TYPE_CELLULAR;
+    case nw_interface_type_wired:
+      return rtc::ADAPTER_TYPE_ETHERNET;
+    case nw_interface_type_loopback:
+      return rtc::ADAPTER_TYPE_LOOPBACK;
+    case nw_interface_type_other:
+    default:
+      return rtc::ADAPTER_TYPE_UNKNOWN;
+  }
+}
+
+} // namespace
+
+@implementation RTCNetworkMonitor {
+  // Raw pointer owned by the caller of -initWithObserver:; cleared in -stop.
+  // All reads/writes are guarded by @synchronized on this instance.
+  webrtc::NetworkMonitorObserver *_observer;
+  nw_path_monitor_t _pathMonitor;
+  dispatch_queue_t _monitorQueue;
+}
+
+/** Designated initializer: starts an NWPathMonitor (iOS 12+) that forwards
+ *  interface/adapter-type updates to `observer`. Returns nil if the path
+ *  monitor cannot be created.
+ */
+- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer {
+  RTC_DCHECK(observer);
+  if (self = [super init]) {
+    _observer = observer;
+    if (@available(iOS 12, *)) {
+      _pathMonitor = nw_path_monitor_create();
+      if (_pathMonitor == nil) {
+        RTCLog(@"nw_path_monitor_create failed.");
+        return nil;
+      }
+      RTCLog(@"NW path monitor created.");
+      __weak RTCNetworkMonitor *weakSelf = self;
+      nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) {
+        // Take a strong reference first and test *that* for nil. Testing
+        // `weakSelf` and then loading it again would leave a window in which
+        // the monitor deallocates, making `strongSelf->_observer` below a
+        // null-pointer dereference.
+        RTCNetworkMonitor *strongSelf = weakSelf;
+        if (strongSelf == nil) {
+          return;
+        }
+        RTCLog(@"NW path monitor: updated.");
+        nw_path_status_t status = nw_path_get_status(path);
+        if (status == nw_path_status_invalid) {
+          RTCLog(@"NW path monitor status: invalid.");
+        } else if (status == nw_path_status_unsatisfied) {
+          RTCLog(@"NW path monitor status: unsatisfied.");
+        } else if (status == nw_path_status_satisfied) {
+          RTCLog(@"NW path monitor status: satisfied.");
+        } else if (status == nw_path_status_satisfiable) {
+          RTCLog(@"NW path monitor status: satisfiable.");
+        }
+        // Heap-allocated so the nested enumeration block can mutate it;
+        // deleted below after its contents are moved to the observer.
+        std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> *map =
+            new std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>();
+        nw_path_enumerate_interfaces(path, ^bool(nw_interface_t interface) {
+          const char *name = nw_interface_get_name(interface);
+          if (name == nullptr) {
+            // Constructing std::string from a null pointer is undefined
+            // behavior; skip unnamed interfaces.
+            return true;
+          }
+          nw_interface_type_t interfaceType = nw_interface_get_type(interface);
+          RTCLog(@"NW path monitor available interface: %s", name);
+          rtc::AdapterType adapterType = AdapterTypeFromInterfaceType(interfaceType);
+          map->insert(std::pair<std::string, rtc::AdapterType>(name, adapterType));
+          // The enumerator block must return a bool: true to keep enumerating.
+          // (The previous void block cast to the bool-returning block type
+          // yielded an undefined continuation value.)
+          return true;
+        });
+        @synchronized(strongSelf) {
+          webrtc::NetworkMonitorObserver *observer = strongSelf->_observer;
+          if (observer) {
+            observer->OnPathUpdate(std::move(*map));
+          }
+        }
+        delete map;
+      });
+      nw_path_monitor_set_queue(
+          _pathMonitor,
+          [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]);
+      nw_path_monitor_start(_pathMonitor);
+    }
+  }
+  return self;
+}
+
+/** Cancels the underlying path monitor, if one was ever created. */
+- (void)cancel {
+  if (@available(iOS 12, *)) {
+    // `_pathMonitor` is nil when creation failed (init returned nil but
+    // dealloc still runs) — nw_path_monitor_cancel requires a non-null
+    // monitor.
+    if (_pathMonitor != nil) {
+      nw_path_monitor_cancel(_pathMonitor);
+    }
+  }
+}
+
+/** Cancels monitoring and detaches the observer so no further updates post. */
+- (void)stop {
+  [self cancel];
+  @synchronized(self) {
+    _observer = nil;
+  }
+}
+
+- (void)dealloc {
+  [self cancel];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
new file mode 100644
index 0000000000..e5987fe22a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+/** Renders RTCVideoFrames by converting their buffers to I420 and uploading
+ *  the Y/U/V planes as Metal textures (see RTCMTLI420Renderer.mm).
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLI420Renderer : RTCMTLRenderer
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
new file mode 100644
index 0000000000..f4c76fa313
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLI420Renderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCI420Buffer.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#import "RTCMTLRenderer+Private.h"
+
+// Metal shading-language source for the I420 pipeline: a pass-through vertex
+// stage plus a fragment stage that samples the three Y/U/V planes and converts
+// to RGB. Note: comments inside MTL_STRINGIFY are stripped by the C
+// preprocessor before stringification, so they never reach the MSL compiler.
+static NSString *const shaderSource = MTL_STRINGIFY(
+    using namespace metal;
+
+    typedef struct {
+      packed_float2 position;
+      packed_float2 texcoord;
+    } Vertex;
+
+    typedef struct {
+      float4 position[[position]];
+      float2 texcoord;
+    } Varyings;
+
+    vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
+                                      unsigned int vid[[vertex_id]]) {
+      Varyings out;
+      constant Vertex &v = verticies[vid];
+      out.position = float4(float2(v.position), 0.0, 1.0);
+      out.texcoord = v.texcoord;
+
+      return out;
+    }
+
+    fragment half4 fragmentColorConversion(
+        Varyings in[[stage_in]],
+        texture2d<float, access::sample> textureY[[texture(0)]],
+        texture2d<float, access::sample> textureU[[texture(1)]],
+        texture2d<float, access::sample> textureV[[texture(2)]]) {
+      constexpr sampler s(address::clamp_to_edge, filter::linear);
+      float y;
+      float u;
+      float v;
+      float r;
+      float g;
+      float b;
+      // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
+      y = textureY.sample(s, in.texcoord).r;
+      u = textureU.sample(s, in.texcoord).r;
+      v = textureV.sample(s, in.texcoord).r;
+      u = u - 0.5;
+      v = v - 0.5;
+      r = y + 1.403 * v;
+      g = y - 0.344 * u - 0.714 * v;
+      b = y + 1.770 * u;
+
+      float4 out = float4(r, g, b, 1.0);
+
+      return half4(out);
+    });
+
+@implementation RTCMTLI420Renderer {
+  // Textures, one R8 texture per I420 plane.
+  id<MTLTexture> _yTexture;
+  id<MTLTexture> _uTexture;
+  id<MTLTexture> _vTexture;
+
+  // Cached descriptors; rebuilt (with their textures) only on size change.
+  MTLTextureDescriptor *_descriptor;
+  MTLTextureDescriptor *_chromaDescriptor;
+
+  int _width;
+  int _height;
+  int _chromaWidth;
+  int _chromaHeight;
+}
+
+#pragma mark - Virtual
+
+- (NSString *)shaderSource {
+  return shaderSource;
+}
+
+// I420 frames are rendered uncropped: the crop rect is the full frame.
+- (void)getWidth:(nonnull int *)width
+          height:(nonnull int *)height
+       cropWidth:(nonnull int *)cropWidth
+      cropHeight:(nonnull int *)cropHeight
+           cropX:(nonnull int *)cropX
+           cropY:(nonnull int *)cropY
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  *width = frame.width;
+  *height = frame.height;
+  *cropWidth = frame.width;
+  *cropHeight = frame.height;
+  *cropX = 0;
+  *cropY = 0;
+}
+
+// Converts the frame's buffer to I420 and copies its planes into the cached
+// Metal textures. Returns NO if the superclass setup or device lookup fails.
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  if (![super setupTexturesForFrame:frame]) {
+    return NO;
+  }
+
+  id<MTLDevice> device = [self currentMetalDevice];
+  if (!device) {
+    return NO;
+  }
+
+  id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
+
+  // Luma (y) texture.
+  if (!_descriptor || _width != frame.width || _height != frame.height) {
+    _width = frame.width;
+    _height = frame.height;
+    _descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
+                                                                     width:_width
+                                                                    height:_height
+                                                                 mipmapped:NO];
+    _descriptor.usage = MTLTextureUsageShaderRead;
+    _yTexture = [device newTextureWithDescriptor:_descriptor];
+  }
+
+  [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
+               mipmapLevel:0
+                 withBytes:buffer.dataY
+               bytesPerRow:buffer.strideY];
+
+  // Chroma (u,v) textures.
+  // NOTE(review): assumes even frame dimensions; for odd sizes the I420 chroma
+  // planes are (w+1)/2 x (h+1)/2 rather than w/2 x h/2 — confirm callers only
+  // feed even-sized frames.
+  if (!_chromaDescriptor || _chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2) {
+    _chromaWidth = frame.width / 2;
+    _chromaHeight = frame.height / 2;
+    _chromaDescriptor =
+        [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
+                                                           width:_chromaWidth
+                                                          height:_chromaHeight
+                                                       mipmapped:NO];
+    _chromaDescriptor.usage = MTLTextureUsageShaderRead;
+    _uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
+    _vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
+  }
+
+  [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
+               mipmapLevel:0
+                 withBytes:buffer.dataU
+               bytesPerRow:buffer.strideU];
+  [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
+               mipmapLevel:0
+                 withBytes:buffer.dataV
+               bytesPerRow:buffer.strideV];
+
+  return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
+}
+
+// Binds the three plane textures at the indices the fragment shader expects.
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+  [renderEncoder setFragmentTexture:_yTexture atIndex:0];
+  [renderEncoder setFragmentTexture:_uTexture atIndex:1];
+  [renderEncoder setFragmentTexture:_vTexture atIndex:2];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
new file mode 100644
index 0000000000..f70e2ad5ee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AppKit/AppKit.h>
+
+#import "RTCVideoRenderer.h"
+
+/** macOS view that renders RTCVideoFrames into an embedded MTKView using the
+ *  Metal I420 renderer (see RTCMTLNSVideoView.m).
+ */
+NS_AVAILABLE_MAC(10.11)
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+/** Notified (on the main queue) when the rendered video size changes. */
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+/** YES when at least one Metal device is present on this machine. */
++ (BOOL)isMetalAvailable;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
new file mode 100644
index 0000000000..625fb1caa7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLNSVideoView.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCVideoFrame.h"
+
+#import "RTCMTLI420Renderer.h"
+
+// Private interface: MTKViewDelegate conformance plus internal render state.
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView)
+()<MTKViewDelegate> @property(nonatomic) id<RTCMTLRenderer> renderer;
+@property(nonatomic, strong) MTKView *metalView;
+// Atomic: written from renderFrame:, read on the MTKView draw path.
+@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) {
+  id<RTCMTLRenderer> _renderer;
+}
+
+@synthesize delegate = _delegate;
+@synthesize renderer = _renderer;
+@synthesize metalView = _metalView;
+@synthesize videoFrame = _videoFrame;
+
+- (instancetype)initWithFrame:(CGRect)frameRect {
+  self = [super initWithFrame:frameRect];
+  if (self) {
+    [self configure];
+  }
+  return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aCoder {
+  self = [super initWithCoder:aCoder];
+  if (self) {
+    [self configure];
+  }
+  return self;
+}
+
+#pragma mark - Private
+
++ (BOOL)isMetalAvailable {
+  // Metal rendering requires at least one Metal-capable GPU.
+  return [MTLCopyAllDevices() count] > 0;
+}
+
+// Builds the embedded MTKView and the I420 renderer. Leaves the view inert
+// (nil renderer / no metalView) when Metal is unavailable or setup fails.
+- (void)configure {
+  if ([[self class] isMetalAvailable]) {
+    _metalView = [[MTKView alloc] initWithFrame:self.bounds];
+    [self addSubview:_metalView];
+    _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
+    _metalView.translatesAutoresizingMaskIntoConstraints = NO;
+    _metalView.framebufferOnly = YES;
+    _metalView.delegate = self;
+
+    _renderer = [[RTCMTLI420Renderer alloc] init];
+    if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
+      _renderer = nil;
+    }
+  }
+}
+
+// Pins the metal view to all four edges of the receiver.
+- (void)updateConstraints {
+  NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
+
+  NSArray *constraintsHorizontal =
+      [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
+                                              options:0
+                                              metrics:nil
+                                                views:views];
+  [self addConstraints:constraintsHorizontal];
+
+  NSArray *constraintsVertical =
+      [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
+                                              options:0
+                                              metrics:nil
+                                                views:views];
+  [self addConstraints:constraintsVertical];
+  [super updateConstraints];
+}
+
+#pragma mark - MTKViewDelegate methods
+- (void)drawInMTKView:(nonnull MTKView *)view {
+  // Snapshot the atomic property once so the nil check and the draw call use
+  // the same frame even if renderFrame: replaces it concurrently.
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+  if (frame == nil) {
+    return;
+  }
+  if (view == self.metalView) {
+    [_renderer drawFrame:frame];
+  }
+}
+
+- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
+  // Intentionally empty; drawableSize is driven by -setSize:.
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+- (void)setSize:(CGSize)size {
+  _metalView.drawableSize = size;
+  // Delegate callbacks are always delivered on the main queue.
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [self.delegate videoView:self didChangeVideoSize:size];
+  });
+  [_metalView draw];
+}
+
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  if (frame == nil) {
+    return;
+  }
+  // Store an I420 copy; the draw path renders whatever frame is current.
+  self.videoFrame = [frame newI420VideoFrame];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h
new file mode 100644
index 0000000000..866b7ea17e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+/** Renders frames backed by NV12 (biplanar Y + interleaved CbCr)
+ *  CVPixelBuffers; see RTCMTLNV12Renderer.mm.
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLNV12Renderer : RTCMTLRenderer
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
new file mode 100644
index 0000000000..7b037c6dbc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLNV12Renderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "RTCMTLRenderer+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "rtc_base/checks.h"
+
+// Metal shading-language source for the NV12 pipeline: pass-through vertex
+// stage plus a fragment stage sampling the Y plane and the interleaved CbCr
+// plane and converting to RGB. Comments inside MTL_STRINGIFY are stripped by
+// the C preprocessor before stringification.
+static NSString *const shaderSource = MTL_STRINGIFY(
+    using namespace metal;
+
+    typedef struct {
+      packed_float2 position;
+      packed_float2 texcoord;
+    } Vertex;
+
+    typedef struct {
+      float4 position[[position]];
+      float2 texcoord;
+    } Varyings;
+
+    vertex Varyings vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
+                                      unsigned int vid[[vertex_id]]) {
+      Varyings out;
+      constant Vertex &v = verticies[vid];
+      out.position = float4(float2(v.position), 0.0, 1.0);
+      out.texcoord = v.texcoord;
+      return out;
+    }
+
+    // Receiving YCrCb textures.
+    fragment half4 fragmentColorConversion(
+        Varyings in[[stage_in]],
+        texture2d<float, access::sample> textureY[[texture(0)]],
+        texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
+      constexpr sampler s(address::clamp_to_edge, filter::linear);
+      float y;
+      float2 uv;
+      y = textureY.sample(s, in.texcoord).r;
+      uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
+
+      // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
+      float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
+
+      return half4(out);
+    });
+
+@implementation RTCMTLNV12Renderer {
+  // Textures.
+  // Cache that wraps CVPixelBuffer planes as Metal textures; CFReleased in
+  // dealloc.
+  CVMetalTextureCacheRef _textureCache;
+  id<MTLTexture> _yTexture;
+  id<MTLTexture> _CrCbTexture;
+}
+
+// Attaches `view` as the render target, then builds the texture cache.
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+  if ([super addRenderingDestination:view]) {
+    return [self initializeTextureCache];
+  }
+  return NO;
+}
+
+// Creates the CVMetalTextureCache on the current Metal device.
+- (BOOL)initializeTextureCache {
+  CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
+                                              nil, &_textureCache);
+  if (status != kCVReturnSuccess) {
+    RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
+    return NO;
+  }
+
+  return YES;
+}
+
+- (NSString *)shaderSource {
+  return shaderSource;
+}
+
+// Reports the pixel buffer's full dimensions plus the crop rectangle carried
+// by the RTCCVPixelBuffer wrapper.
+- (void)getWidth:(nonnull int *)width
+          height:(nonnull int *)height
+       cropWidth:(nonnull int *)cropWidth
+      cropHeight:(nonnull int *)cropHeight
+           cropX:(nonnull int *)cropX
+           cropY:(nonnull int *)cropY
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+  *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
+  *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
+  *cropWidth = pixelBuffer.cropWidth;
+  *cropHeight = pixelBuffer.cropHeight;
+  *cropX = pixelBuffer.cropX;
+  *cropY = pixelBuffer.cropY;
+}
+
+// Wraps plane 0 (luma, R8) and plane 1 (interleaved CbCr, RG8) of the frame's
+// CVPixelBuffer as Metal textures via the texture cache. No pixel copy occurs.
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
+  if (![super setupTexturesForFrame:frame]) {
+    return NO;
+  }
+  CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
+
+  id<MTLTexture> lumaTexture = nil;
+  id<MTLTexture> chromaTexture = nil;
+  CVMetalTextureRef outTexture = nullptr;
+
+  // Luma (y) texture.
+  int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
+  int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+
+  int indexPlane = 0;
+  CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
+      lumaHeight, indexPlane, &outTexture);
+
+  if (result == kCVReturnSuccess) {
+    lumaTexture = CVMetalTextureGetTexture(outTexture);
+  }
+
+  // Same as CFRelease except it can be passed NULL without crashing.
+  CVBufferRelease(outTexture);
+  outTexture = nullptr;
+
+  // Chroma (CrCb) texture.
+  // NOTE(review): lumaWidth / 2 assumes even luma dimensions; plane 1 of an
+  // NV12 buffer with odd sizes is (w+1)/2 x (h+1)/2 — confirm callers.
+  indexPlane = 1;
+  result = CVMetalTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
+      lumaHeight / 2, indexPlane, &outTexture);
+  if (result == kCVReturnSuccess) {
+    chromaTexture = CVMetalTextureGetTexture(outTexture);
+  }
+  CVBufferRelease(outTexture);
+
+  // Commit both textures only when both wraps succeeded.
+  if (lumaTexture != nil && chromaTexture != nil) {
+    _yTexture = lumaTexture;
+    _CrCbTexture = chromaTexture;
+    return YES;
+  }
+  return NO;
+}
+
+// Binds the two plane textures at the indices the fragment shader expects.
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+  [renderEncoder setFragmentTexture:_yTexture atIndex:0];
+  [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
+}
+
+- (void)dealloc {
+  // The CV texture cache is a CF object and is not managed by ARC.
+  if (_textureCache) {
+    CFRelease(_textureCache);
+  }
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h
new file mode 100644
index 0000000000..9db422cd22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMTLRenderer.h"
+
+/** @abstract RGB/BGR renderer.
+ * @discussion This renderer handles both kCVPixelFormatType_32BGRA and
+ * kCVPixelFormatType_32ARGB. 32ARGB content is wrapped as an RGBA texture and
+ * reordered from ARGB to RGBA in the fragment shader (`isARGB` uniform).
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLRGBRenderer : RTCMTLRenderer
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
new file mode 100644
index 0000000000..e5dc4ef80a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLRGBRenderer.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "RTCMTLRenderer+Private.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "rtc_base/checks.h"
+
+// Metal shading-language source for the RGB pipeline: pass-through vertex
+// stage plus a fragment stage that samples the texture directly, swizzling
+// ARGB input to RGBA when the `isARGB` uniform is set. Comments inside
+// MTL_STRINGIFY are stripped by the C preprocessor before stringification.
+static NSString *const shaderSource = MTL_STRINGIFY(
+    using namespace metal;
+
+    typedef struct {
+      packed_float2 position;
+      packed_float2 texcoord;
+    } Vertex;
+
+    typedef struct {
+      float4 position[[position]];
+      float2 texcoord;
+    } VertexIO;
+
+    vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]],
+                                      uint vid[[vertex_id]]) {
+      VertexIO out;
+      constant Vertex &v = verticies[vid];
+      out.position = float4(float2(v.position), 0.0, 1.0);
+      out.texcoord = v.texcoord;
+      return out;
+    }
+
+    fragment half4 fragmentColorConversion(VertexIO in[[stage_in]],
+                                           texture2d<half, access::sample> texture[[texture(0)]],
+                                           constant bool &isARGB[[buffer(0)]]) {
+      constexpr sampler s(address::clamp_to_edge, filter::linear);
+
+      half4 out = texture.sample(s, in.texcoord);
+      if (isARGB) {
+        out = half4(out.g, out.b, out.a, out.r);
+      }
+
+      return out;
+    });
+
+@implementation RTCMTLRGBRenderer {
+  // Textures.
+  // Cache wrapping CVPixelBuffers as Metal textures; CFReleased in dealloc.
+  CVMetalTextureCacheRef _textureCache;
+  id<MTLTexture> _texture;
+
+  // Uniforms.
+  // Holds the single `isARGB` bool read by the fragment shader.
+  id<MTLBuffer> _uniformsBuffer;
+}
+
+// Attaches `view` as the render target, then builds the texture cache.
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+  if ([super addRenderingDestination:view]) {
+    return [self initializeTextureCache];
+  }
+  return NO;
+}
+
+// Creates the CVMetalTextureCache on the current Metal device.
+- (BOOL)initializeTextureCache {
+  CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
+                                              nil, &_textureCache);
+  if (status != kCVReturnSuccess) {
+    RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
+    return NO;
+  }
+
+  return YES;
+}
+
+- (NSString *)shaderSource {
+  return shaderSource;
+}
+
+// Reports the pixel buffer's full dimensions plus the crop rectangle carried
+// by the RTCCVPixelBuffer wrapper.
+- (void)getWidth:(nonnull int *)width
+          height:(nonnull int *)height
+       cropWidth:(nonnull int *)cropWidth
+      cropHeight:(nonnull int *)cropHeight
+           cropX:(nonnull int *)cropX
+           cropY:(nonnull int *)cropY
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+  *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
+  *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
+  *cropWidth = pixelBuffer.cropWidth;
+  *cropHeight = pixelBuffer.cropHeight;
+  *cropX = pixelBuffer.cropX;
+  *cropY = pixelBuffer.cropY;
+}
+
+// Wraps the frame's 32BGRA/32ARGB CVPixelBuffer as a Metal texture (no pixel
+// copy) and refreshes the isARGB uniform buffer. Rejects other pixel formats.
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
+  if (![super setupTexturesForFrame:frame]) {
+    return NO;
+  }
+  CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
+
+  id<MTLTexture> gpuTexture = nil;
+  CVMetalTextureRef textureOut = nullptr;
+  bool isARGB;
+
+  int width = CVPixelBufferGetWidth(pixelBuffer);
+  int height = CVPixelBufferGetHeight(pixelBuffer);
+  OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+
+  // ARGB has no matching Metal format, so it is wrapped as RGBA and the
+  // channel order is corrected in the fragment shader via isARGB.
+  MTLPixelFormat mtlPixelFormat;
+  if (pixelFormat == kCVPixelFormatType_32BGRA) {
+    mtlPixelFormat = MTLPixelFormatBGRA8Unorm;
+    isARGB = false;
+  } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+    mtlPixelFormat = MTLPixelFormatRGBA8Unorm;
+    isARGB = true;
+  } else {
+    RTC_DCHECK_NOTREACHED();
+    return NO;
+  }
+
+  CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat,
+      width, height, 0, &textureOut);
+  if (result == kCVReturnSuccess) {
+    gpuTexture = CVMetalTextureGetTexture(textureOut);
+  }
+  // CVBufferRelease tolerates NULL, unlike CFRelease.
+  CVBufferRelease(textureOut);
+
+  if (gpuTexture != nil) {
+    _texture = gpuTexture;
+    _uniformsBuffer =
+        [[self currentMetalDevice] newBufferWithBytes:&isARGB
+                                               length:sizeof(isARGB)
+                                              options:MTLResourceCPUCacheModeDefaultCache];
+    return YES;
+  }
+
+  return NO;
+}
+
+// Binds the texture and the isARGB uniform at the slots the shader expects.
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+  [renderEncoder setFragmentTexture:_texture atIndex:0];
+  [renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
+}
+
+- (void)dealloc {
+  // The CV texture cache is a CF object and is not managed by ARC.
+  if (_textureCache) {
+    CFRelease(_textureCache);
+  }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
new file mode 100644
index 0000000000..916d4d4430
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Metal/Metal.h>
+
+#import "RTCMTLRenderer.h"
+
+#define MTL_STRINGIFY(s) @ #s
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Hooks each concrete renderer (I420/NV12/RGB) overrides, plus accessors
+ *  shared with the base class render pass.
+ */
+@interface RTCMTLRenderer (Private)
+/// The Metal device backing the renderer, or nil if none is available.
+- (nullable id<MTLDevice>)currentMetalDevice;
+/// Metal shading-language source for this renderer's pipeline.
+- (NSString *)shaderSource;
+/// Prepares `frame`'s pixel data as Metal textures. Returns NO on failure.
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+/// Binds the prepared textures (and uniforms) to `renderEncoder`.
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
+/// Returns the frame's dimensions and crop rectangle via out-parameters.
+- (void)getWidth:(nonnull int *)width
+          height:(nonnull int *)height
+       cropWidth:(nonnull int *)cropWidth
+      cropHeight:(nonnull int *)cropHeight
+           cropX:(nonnull int *)cropX
+           cropY:(nonnull int *)cropY
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
new file mode 100644
index 0000000000..aa31545973
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#else
+#import <AppKit/AppKit.h>
+#endif
+
+#import "base/RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+/**
+ * Protocol defining ability to render RTCVideoFrame in Metal enabled views.
+ */
+@protocol RTCMTLRenderer <NSObject>
+
+/**
+ * Method to be implemented to perform actual rendering of the provided frame.
+ *
+ * @param frame The frame to be rendered.
+ */
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+/**
+ * Sets the provided view as rendering destination if possible.
+ *
+ * If not possible method returns NO and callers of the method are responsible for performing
+ * cleanups.
+ */
+
+#if TARGET_OS_IOS
+- (BOOL)addRenderingDestination:(__kindof UIView *)view;
+#else
+- (BOOL)addRenderingDestination:(__kindof NSView *)view;
+#endif
+
+@end
+
+/**
+ * Implementation of RTCMTLRenderer protocol.
+ */
+/**
+ * Implementation of RTCMTLRenderer protocol. Subclasses supply the shader
+ * source and texture setup (see RTCMTLRenderer+Private.h).
+ */
+NS_AVAILABLE(10_11, 9_0)
+@interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
+
+/** @abstract A wrapped RTCVideoRotation, or nil.
+ *  @discussion When not nil, the rotation of the actual frame is ignored when rendering.
+ */
+@property(atomic, nullable) NSValue *rotationOverride;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
new file mode 100644
index 0000000000..410590a7b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
@@ -0,0 +1,328 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLRenderer+Private.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#include "api/video/video_rotation.h"
+#include "rtc_base/checks.h"
+
+// As defined in shaderSource.
+static NSString *const vertexFunctionName = @"vertexPassthrough";
+static NSString *const fragmentFunctionName = @"fragmentColorConversion";
+
+static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
+static NSString *const commandBufferLabel = @"RTCCommandBuffer";
+static NSString *const renderEncoderLabel = @"RTCEncoder";
+static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
+
+// Computes the texture coordinates given rotation and cropping.
+static inline void getCubeVertexData(int cropX,
+ int cropY,
+ int cropWidth,
+ int cropHeight,
+ size_t frameWidth,
+ size_t frameHeight,
+ RTCVideoRotation rotation,
+ float *buffer) {
+ // The computed values are the adjusted texture coordinates, in [0..1].
+ // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
+ // left/top edge.
+ // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
+ // right/bottom edge (i.e. render up to 80% of the width/height).
+ float cropLeft = cropX / (float)frameWidth;
+ float cropRight = (cropX + cropWidth) / (float)frameWidth;
+ float cropTop = cropY / (float)frameHeight;
+ float cropBottom = (cropY + cropHeight) / (float)frameHeight;
+
+ // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
+ // into account. The first two columns are view coordinates, the last two are texture coordinates.
+ switch (rotation) {
+ case RTCVideoRotation_0: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
+ 1.0, -1.0, cropRight, cropBottom,
+ -1.0, 1.0, cropLeft, cropTop,
+ 1.0, 1.0, cropRight, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_90: {
+ float values[16] = {-1.0, -1.0, cropRight, cropBottom,
+ 1.0, -1.0, cropRight, cropTop,
+ -1.0, 1.0, cropLeft, cropBottom,
+ 1.0, 1.0, cropLeft, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_180: {
+ float values[16] = {-1.0, -1.0, cropRight, cropTop,
+ 1.0, -1.0, cropLeft, cropTop,
+ -1.0, 1.0, cropRight, cropBottom,
+ 1.0, 1.0, cropLeft, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_270: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropTop,
+ 1.0, -1.0, cropLeft, cropBottom,
+ -1.0, 1.0, cropRight, cropTop,
+ 1.0, 1.0, cropRight, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ }
+}
+
+// The max number of command buffers in flight (submitted to GPU).
+// For now setting it up to 1.
+// In future we might use triple buffering method if it improves performance.
+static const NSInteger kMaxInflightBuffers = 1;
+
+@implementation RTCMTLRenderer {
+ __kindof MTKView *_view;
+
+ // Controller.
+ dispatch_semaphore_t _inflight_semaphore;
+
+ // Renderer.
+ id<MTLDevice> _device;
+ id<MTLCommandQueue> _commandQueue;
+ id<MTLLibrary> _defaultLibrary;
+ id<MTLRenderPipelineState> _pipelineState;
+
+ // Buffers.
+ id<MTLBuffer> _vertexBuffer;
+
+ // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
+ int _oldFrameWidth;
+ int _oldFrameHeight;
+ int _oldCropWidth;
+ int _oldCropHeight;
+ int _oldCropX;
+ int _oldCropY;
+ RTCVideoRotation _oldRotation;
+}
+
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
+ }
+
+ return self;
+}
+
+- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
+ return [self setupWithView:view];
+}
+
+#pragma mark - Private
+
+- (BOOL)setupWithView:(__kindof MTKView *)view {
+ BOOL success = NO;
+ if ([self setupMetal]) {
+ _view = view;
+ view.device = _device;
+ view.preferredFramesPerSecond = 30;
+ view.autoResizeDrawable = NO;
+
+ [self loadAssets];
+
+ float vertexBufferArray[16] = {0};
+ _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
+ length:sizeof(vertexBufferArray)
+ options:MTLResourceCPUCacheModeWriteCombined];
+ success = YES;
+ }
+ return success;
+}
+#pragma mark - Inheritance
+
+- (id<MTLDevice>)currentMetalDevice {
+ return _device;
+}
+
+- (NSString *)shaderSource {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+ return nil;
+}
+
+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+}
+
+- (void)getWidth:(int *)width
+ height:(int *)height
+ cropWidth:(int *)cropWidth
+ cropHeight:(int *)cropHeight
+ cropX:(int *)cropX
+ cropY:(int *)cropY
+ ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
+}
+
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ // Apply rotation override if set.
+ RTCVideoRotation rotation;
+ NSValue *rotationOverride = self.rotationOverride;
+ if (rotationOverride) {
+#if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
+ (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 11, *)) {
+ [rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else
+#endif
+ {
+ [rotationOverride getValue:&rotation];
+ }
+ } else {
+ rotation = frame.rotation;
+ }
+
+ int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
+ [self getWidth:&frameWidth
+ height:&frameHeight
+ cropWidth:&cropWidth
+ cropHeight:&cropHeight
+ cropX:&cropX
+ cropY:&cropY
+ ofFrame:frame];
+
+ // Recompute the texture cropping and recreate vertexBuffer if necessary.
+ if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
+ cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
+ frameHeight != _oldFrameHeight) {
+ getCubeVertexData(cropX,
+ cropY,
+ cropWidth,
+ cropHeight,
+ frameWidth,
+ frameHeight,
+ rotation,
+ (float *)_vertexBuffer.contents);
+ _oldCropX = cropX;
+ _oldCropY = cropY;
+ _oldCropWidth = cropWidth;
+ _oldCropHeight = cropHeight;
+ _oldRotation = rotation;
+ _oldFrameWidth = frameWidth;
+ _oldFrameHeight = frameHeight;
+ }
+
+ return YES;
+}
+
+#pragma mark - GPU methods
+
+- (BOOL)setupMetal {
+ // Set the view to use the default device.
+ _device = MTLCreateSystemDefaultDevice();
+ if (!_device) {
+ return NO;
+ }
+
+ // Create a new command queue.
+ _commandQueue = [_device newCommandQueue];
+
+ // Load metal library from source.
+ NSError *libraryError = nil;
+ NSString *shaderSource = [self shaderSource];
+
+ id<MTLLibrary> sourceLibrary =
+ [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
+
+ if (libraryError) {
+ RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
+ return NO;
+ }
+
+ if (!sourceLibrary) {
+ RTCLogError(@"Metal: Failed to load library. %@", libraryError);
+ return NO;
+ }
+ _defaultLibrary = sourceLibrary;
+
+ return YES;
+}
+
+- (void)loadAssets {
+ id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
+ id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
+
+ MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
+ pipelineDescriptor.label = pipelineDescriptorLabel;
+ pipelineDescriptor.vertexFunction = vertexFunction;
+ pipelineDescriptor.fragmentFunction = fragmentFunction;
+ pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
+ pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
+ NSError *error = nil;
+ _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
+
+ if (!_pipelineState) {
+ RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
+ }
+}
+
+- (void)render {
+ id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
+ commandBuffer.label = commandBufferLabel;
+
+ __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
+ [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
+ // GPU work completed.
+ dispatch_semaphore_signal(block_semaphore);
+ }];
+
+ MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
+ if (renderPassDescriptor) { // Valid drawable.
+ id<MTLRenderCommandEncoder> renderEncoder =
+ [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
+ renderEncoder.label = renderEncoderLabel;
+
+ // Set context state.
+ [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
+ [renderEncoder setRenderPipelineState:_pipelineState];
+ [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
+ [self uploadTexturesToRenderEncoder:renderEncoder];
+
+ [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
+ vertexStart:0
+ vertexCount:4
+ instanceCount:1];
+ [renderEncoder popDebugGroup];
+ [renderEncoder endEncoding];
+
+ [commandBuffer presentDrawable:_view.currentDrawable];
+ }
+
+ // CPU work is completed, GPU work can be started.
+ [commandBuffer commit];
+}
+
+#pragma mark - RTCMTLRenderer
+
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ @autoreleasepool {
+    // Wait until the inflight (currently sent to GPU) command buffer
+ // has completed the GPU work.
+ dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
+
+ if ([self setupTexturesForFrame:frame]) {
+ [self render];
+ } else {
+ dispatch_semaphore_signal(_inflight_semaphore);
+ }
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
new file mode 100644
index 0000000000..3320d12076
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrame.h"
+#import "RTCVideoRenderer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * RTCMTLVideoView is a thin wrapper around MTKView.
+ *
+ * It has id<RTCVideoRenderer> property that renders video frames in the view's
+ * bounds using Metal.
+ */
+NS_CLASS_AVAILABLE_IOS(9)
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView<RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+@property(nonatomic) UIViewContentMode videoContentMode;
+
+/** @abstract Enables/disables rendering.
+ */
+@property(nonatomic, getter=isEnabled) BOOL enabled;
+
+/** @abstract Wrapped RTCVideoRotation, or nil.
+ */
+@property(nonatomic, nullable) NSValue* rotationOverride;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
new file mode 100644
index 0000000000..c5d9e4385f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCMTLVideoView.h"
+
+#import <Metal/Metal.h>
+#import <MetalKit/MetalKit.h>
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#import "RTCMTLI420Renderer.h"
+#import "RTCMTLNV12Renderer.h"
+#import "RTCMTLRGBRenderer.h"
+
+// To avoid unrecognized symbol linker errors, we're taking advantage of the objc runtime.
+// Linking errors occur when compiling for architectures that don't support Metal.
+#define MTKViewClass NSClassFromString(@"MTKView")
+#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
+#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
+#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
+
+@interface RTC_OBJC_TYPE (RTCMTLVideoView)
+()<MTKViewDelegate> @property(nonatomic) RTCMTLI420Renderer *rendererI420;
+@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
+@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
+@property(nonatomic) MTKView *metalView;
+@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
+@property(nonatomic) CGSize videoFrameSize;
+@property(nonatomic) int64_t lastFrameTimeNs;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCMTLVideoView)
+
+@synthesize delegate = _delegate;
+@synthesize rendererI420 = _rendererI420;
+@synthesize rendererNV12 = _rendererNV12;
+@synthesize rendererRGB = _rendererRGB;
+@synthesize metalView = _metalView;
+@synthesize videoFrame = _videoFrame;
+@synthesize videoFrameSize = _videoFrameSize;
+@synthesize lastFrameTimeNs = _lastFrameTimeNs;
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)initWithFrame:(CGRect)frameRect {
+ self = [super initWithFrame:frameRect];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aCoder {
+ self = [super initWithCoder:aCoder];
+ if (self) {
+ [self configure];
+ }
+ return self;
+}
+
+- (BOOL)isEnabled {
+ return !self.metalView.paused;
+}
+
+- (void)setEnabled:(BOOL)enabled {
+ self.metalView.paused = !enabled;
+}
+
+- (UIViewContentMode)videoContentMode {
+ return self.metalView.contentMode;
+}
+
+- (void)setVideoContentMode:(UIViewContentMode)mode {
+ self.metalView.contentMode = mode;
+}
+
+#pragma mark - Private
+
++ (BOOL)isMetalAvailable {
+ return MTLCreateSystemDefaultDevice() != nil;
+}
+
++ (MTKView *)createMetalView:(CGRect)frame {
+ return [[MTKViewClass alloc] initWithFrame:frame];
+}
+
++ (RTCMTLNV12Renderer *)createNV12Renderer {
+ return [[RTCMTLNV12RendererClass alloc] init];
+}
+
++ (RTCMTLI420Renderer *)createI420Renderer {
+ return [[RTCMTLI420RendererClass alloc] init];
+}
+
++ (RTCMTLRGBRenderer *)createRGBRenderer {
+ return [[RTCMTLRGBRenderer alloc] init];
+}
+
+- (void)configure {
+ NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable],
+ @"Metal not availiable on this device");
+
+ self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds];
+ self.metalView.delegate = self;
+ self.metalView.contentMode = UIViewContentModeScaleAspectFill;
+ [self addSubview:self.metalView];
+ self.videoFrameSize = CGSizeZero;
+}
+
+- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
+ [super setMultipleTouchEnabled:multipleTouchEnabled];
+ self.metalView.multipleTouchEnabled = multipleTouchEnabled;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+
+ CGRect bounds = self.bounds;
+ self.metalView.frame = bounds;
+ if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
+ self.metalView.drawableSize = [self drawableSize];
+ } else {
+ self.metalView.drawableSize = bounds.size;
+ }
+}
+
+#pragma mark - MTKViewDelegate methods
+
+- (void)drawInMTKView:(nonnull MTKView *)view {
+ NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
+ // Skip rendering if we've already rendered this frame.
+ if (!videoFrame || videoFrame.width <= 0 || videoFrame.height <= 0 ||
+ videoFrame.timeStampNs == self.lastFrameTimeNs) {
+ return;
+ }
+
+ if (CGRectIsEmpty(view.bounds)) {
+ return;
+ }
+
+ RTCMTLRenderer *renderer;
+ if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
+ if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
+ if (!self.rendererRGB) {
+ self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
+ if (![self.rendererRGB addRenderingDestination:self.metalView]) {
+ self.rendererRGB = nil;
+ RTCLogError(@"Failed to create RGB renderer");
+ return;
+ }
+ }
+ renderer = self.rendererRGB;
+ } else {
+ if (!self.rendererNV12) {
+ self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer];
+ if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
+ self.rendererNV12 = nil;
+ RTCLogError(@"Failed to create NV12 renderer");
+ return;
+ }
+ }
+ renderer = self.rendererNV12;
+ }
+ } else {
+ if (!self.rendererI420) {
+ self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer];
+ if (![self.rendererI420 addRenderingDestination:self.metalView]) {
+ self.rendererI420 = nil;
+ RTCLogError(@"Failed to create I420 renderer");
+ return;
+ }
+ }
+ renderer = self.rendererI420;
+ }
+
+ renderer.rotationOverride = self.rotationOverride;
+
+ [renderer drawFrame:videoFrame];
+ self.lastFrameTimeNs = videoFrame.timeStampNs;
+}
+
+- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
+}
+
+#pragma mark -
+
+- (void)setRotationOverride:(NSValue *)rotationOverride {
+ _rotationOverride = rotationOverride;
+
+ self.metalView.drawableSize = [self drawableSize];
+ [self setNeedsLayout];
+}
+
+- (RTCVideoRotation)frameRotation {
+ if (self.rotationOverride) {
+ RTCVideoRotation rotation;
+ if (@available(iOS 11, *)) {
+ [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else {
+ [self.rotationOverride getValue:&rotation];
+ }
+ return rotation;
+ }
+
+ return self.videoFrame.rotation;
+}
+
+- (CGSize)drawableSize {
+ // Flip width/height if the rotations are not the same.
+ CGSize videoFrameSize = self.videoFrameSize;
+ RTCVideoRotation frameRotation = [self frameRotation];
+
+ BOOL useLandscape =
+ (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
+ BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
+ (self.videoFrame.rotation == RTCVideoRotation_180);
+
+ if (useLandscape == sizeIsLandscape) {
+ return videoFrameSize;
+ } else {
+ return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+- (void)setSize:(CGSize)size {
+ __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf;
+
+ strongSelf.videoFrameSize = size;
+ CGSize drawableSize = [strongSelf drawableSize];
+
+ strongSelf.metalView.drawableSize = drawableSize;
+ [strongSelf setNeedsLayout];
+ [strongSelf.delegate videoView:self didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ if (!self.isEnabled) {
+ return;
+ }
+
+ if (frame == nil) {
+ RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
+ return;
+ }
+ self.videoFrame = frame;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
new file mode 100644
index 0000000000..71a073ab21
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
+ * and RTCEAGLVideoView if no external shader is specified. This shader will render
+ * the video in a rectangle without any color or geometric transformations.
+ */
+@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm
new file mode 100644
index 0000000000..51dca3223d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultShader.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "RTCOpenGLDefines.h"
+#import "RTCShader.h"
+#import "base/RTCLogging.h"
+
+#include "absl/types/optional.h"
+
+static const int kYTextureUnit = 0;
+static const int kUTextureUnit = 1;
+static const int kVTextureUnit = 2;
+static const int kUvTextureUnit = 1;
+
+// Fragment shader converts YUV values from input textures into a final RGB
+// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
+static const char kI420FragmentShaderSource[] =
+ SHADER_VERSION
+ "precision highp float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureU;\n"
+ "uniform lowp sampler2D s_textureV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " float y, u, v, r, g, b;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
+ " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
+ " u = u - 0.5;\n"
+ " v = v - 0.5;\n"
+ " r = y + 1.403 * v;\n"
+ " g = y - 0.344 * u - 0.714 * v;\n"
+ " b = y + 1.770 * u;\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
+ " }\n";
+
+static const char kNV12FragmentShaderSource[] =
+ SHADER_VERSION
+ "precision mediump float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureUV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " mediump float y;\n"
+ " mediump vec2 uv;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
+ " vec2(0.5, 0.5);\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
+ " y - 0.344 * uv.x - 0.714 * uv.y,\n"
+ " y + 1.770 * uv.x,\n"
+ " 1.0);\n"
+ " }\n";
+
+@implementation RTCDefaultShader {
+ GLuint _vertexBuffer;
+ GLuint _vertexArray;
+ // Store current rotation and only upload new vertex data when rotation changes.
+ absl::optional<RTCVideoRotation> _currentRotation;
+
+ GLuint _i420Program;
+ GLuint _nv12Program;
+}
+
+- (void)dealloc {
+ glDeleteProgram(_i420Program);
+ glDeleteProgram(_nv12Program);
+ glDeleteBuffers(1, &_vertexBuffer);
+ glDeleteVertexArrays(1, &_vertexArray);
+}
+
+- (BOOL)createAndSetupI420Program {
+ NSAssert(!_i420Program, @"I420 program already created");
+ _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
+ if (!_i420Program) {
+ return NO;
+ }
+ GLint ySampler = glGetUniformLocation(_i420Program, "s_textureY");
+ GLint uSampler = glGetUniformLocation(_i420Program, "s_textureU");
+ GLint vSampler = glGetUniformLocation(_i420Program, "s_textureV");
+
+ if (ySampler < 0 || uSampler < 0 || vSampler < 0) {
+ RTCLog(@"Failed to get uniform variable locations in I420 shader");
+ glDeleteProgram(_i420Program);
+ _i420Program = 0;
+ return NO;
+ }
+
+ glUseProgram(_i420Program);
+ glUniform1i(ySampler, kYTextureUnit);
+ glUniform1i(uSampler, kUTextureUnit);
+ glUniform1i(vSampler, kVTextureUnit);
+
+ return YES;
+}
+
+- (BOOL)createAndSetupNV12Program {
+ NSAssert(!_nv12Program, @"NV12 program already created");
+ _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource);
+ if (!_nv12Program) {
+ return NO;
+ }
+ GLint ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
+ GLint uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
+
+ if (ySampler < 0 || uvSampler < 0) {
+ RTCLog(@"Failed to get uniform variable locations in NV12 shader");
+ glDeleteProgram(_nv12Program);
+ _nv12Program = 0;
+ return NO;
+ }
+
+ glUseProgram(_nv12Program);
+ glUniform1i(ySampler, kYTextureUnit);
+ glUniform1i(uvSampler, kUvTextureUnit);
+
+ return YES;
+}
+
+- (BOOL)prepareVertexBufferWithRotation:(RTCVideoRotation)rotation {
+ if (!_vertexBuffer && !RTCCreateVertexBuffer(&_vertexBuffer, &_vertexArray)) {
+ RTCLog(@"Failed to setup vertex buffer");
+ return NO;
+ }
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(_vertexArray);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ if (!_currentRotation || rotation != *_currentRotation) {
+ _currentRotation = absl::optional<RTCVideoRotation>(rotation);
+ RTCSetVertexData(*_currentRotation);
+ }
+ return YES;
+}
+
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uPlane:(GLuint)uPlane
+ vPlane:(GLuint)vPlane {
+ if (![self prepareVertexBufferWithRotation:rotation]) {
+ return;
+ }
+
+ if (!_i420Program && ![self createAndSetupI420Program]) {
+ RTCLog(@"Failed to setup I420 program");
+ return;
+ }
+
+ glUseProgram(_i420Program);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, yPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, uPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kVTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, vPlane);
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+}
+
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uvPlane:(GLuint)uvPlane {
+ if (![self prepareVertexBufferWithRotation:rotation]) {
+ return;
+ }
+
+ if (!_nv12Program && ![self createAndSetupNV12Program]) {
+ RTCLog(@"Failed to setup NV12 shader");
+ return;
+ }
+
+ glUseProgram(_nv12Program);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kYTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, yPlane);
+
+ glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + kUvTextureUnit));
+ glBindTexture(GL_TEXTURE_2D, uvPlane);
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h
new file mode 100644
index 0000000000..b78501e9e6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
+// refreshes, which should be 30fps. We wrap the display link in order to avoid
+// a retain cycle since CADisplayLink takes a strong reference onto its target.
+// The timer is paused by default.
+@interface RTCDisplayLinkTimer : NSObject
+
+@property(nonatomic) BOOL isPaused;
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
+- (void)invalidate;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m
new file mode 100644
index 0000000000..906bb898d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDisplayLinkTimer.h"
+
+#import <UIKit/UIKit.h>
+
+@implementation RTCDisplayLinkTimer {
+ CADisplayLink *_displayLink;
+ void (^_timerHandler)(void);
+}
+
+- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
+ NSParameterAssert(timerHandler);
+ if (self = [super init]) {
+ _timerHandler = timerHandler;
+ _displayLink =
+ [CADisplayLink displayLinkWithTarget:self
+ selector:@selector(displayLinkDidFire:)];
+ _displayLink.paused = YES;
+#if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
+ _displayLink.preferredFramesPerSecond = 30;
+#else
+ [_displayLink setFrameInterval:2];
+#endif
+ [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
+ forMode:NSRunLoopCommonModes];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)isPaused {
+ return _displayLink.paused;
+}
+
+- (void)setIsPaused:(BOOL)isPaused {
+ _displayLink.paused = isPaused;
+}
+
+- (void)invalidate {
+ [_displayLink invalidate];
+}
+
+- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
+ _timerHandler();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
new file mode 100644
index 0000000000..24b26cd602
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCEAGLVideoView);
+
+/**
+ * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames
+ * in its bounds using OpenGLES 2.0 or OpenGLES 3.0.
+ */
+RTC_OBJC_EXPORT
+NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+- (instancetype)initWithFrame:(CGRect)frame
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+/** @abstract Wrapped RTCVideoRotation, or nil.
+ */
+@property(nonatomic, nullable) NSValue *rotationOverride;
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
new file mode 100644
index 0000000000..89e62d2ce7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
@@ -0,0 +1,295 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCEAGLVideoView.h"
+
+#import <GLKit/GLKit.h>
+
+#import "RTCDefaultShader.h"
+#import "RTCDisplayLinkTimer.h"
+#import "RTCI420TextureCache.h"
+#import "RTCNV12TextureCache.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with
+// enableSetNeedsDisplay = NO for the purpose of gaining control of
+// exactly when to call -[GLKView display]. This need for extra
+// control is required to avoid triggering method calls on GLKView
+// that results in attempting to bind the underlying render buffer
+// when the drawable size would be empty which would result in the
+// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
+// the method that will trigger the binding of the render
+// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
+// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
+// its own `isDirty` flag.
+
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
+()<GLKViewDelegate>
+ // `videoFrame` is set when we receive a frame from a worker thread and is read
+ // from the display link callback so atomicity is required.
+ @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
+@property(nonatomic, readonly) GLKView *glkView;
+@end
+
+@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) {
+ RTCDisplayLinkTimer *_timer;
+ EAGLContext *_glContext;
+ // This flag should only be set and read on the main thread (e.g. by
+ // setNeedsDisplay)
+ BOOL _isDirty;
+ id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
+ RTCNV12TextureCache *_nv12TextureCache;
+ RTCI420TextureCache *_i420TextureCache;
+ // As timestamps should be unique between frames, will store last
+ // drawn frame timestamp instead of the whole frame to reduce memory usage.
+ int64_t _lastDrawnFrameTimeStampNs;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize glkView = _glkView;
+@synthesize rotationOverride = _rotationOverride;
+
+- (instancetype)initWithFrame:(CGRect)frame {
+ return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+ return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithFrame:frame]) {
+ _shader = shader;
+ if (![self configure]) {
+ return nil;
+ }
+ }
+ return self;
+}
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithCoder:aDecoder]) {
+ _shader = shader;
+ if (![self configure]) {
+ return nil;
+ }
+ }
+ return self;
+}
+
+- (BOOL)configure {
+ EAGLContext *glContext =
+ [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!glContext) {
+ glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ if (!glContext) {
+ RTCLogError(@"Failed to create EAGLContext");
+ return NO;
+ }
+ _glContext = glContext;
+
+ // GLKView manages a framebuffer for us.
+ _glkView = [[GLKView alloc] initWithFrame:CGRectZero
+ context:_glContext];
+ _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+ _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
+ _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
+ _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
+ _glkView.delegate = self;
+ _glkView.layer.masksToBounds = YES;
+ _glkView.enableSetNeedsDisplay = NO;
+ [self addSubview:_glkView];
+
+ // Listen to application state in order to clean up OpenGL before app goes
+ // away.
+ NSNotificationCenter *notificationCenter =
+ [NSNotificationCenter defaultCenter];
+ [notificationCenter addObserver:self
+ selector:@selector(willResignActive)
+ name:UIApplicationWillResignActiveNotification
+ object:nil];
+ [notificationCenter addObserver:self
+ selector:@selector(didBecomeActive)
+ name:UIApplicationDidBecomeActiveNotification
+ object:nil];
+
+ // Frames are received on a separate thread, so we poll for current frame
+ // using a refresh rate proportional to screen refresh frequency. This
+ // occurs on the main thread.
+ __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
+ _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
+ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
+ [strongSelf displayLinkTimerDidFire];
+ }];
+ if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
+ [self setupGL];
+ }
+ return YES;
+}
+
+- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
+ [super setMultipleTouchEnabled:multipleTouchEnabled];
+ _glkView.multipleTouchEnabled = multipleTouchEnabled;
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ UIApplicationState appState =
+ [UIApplication sharedApplication].applicationState;
+ if (appState == UIApplicationStateActive) {
+ [self teardownGL];
+ }
+ [_timer invalidate];
+ [self ensureGLContext];
+ _shader = nil;
+ if (_glContext && [EAGLContext currentContext] == _glContext) {
+ [EAGLContext setCurrentContext:nil];
+ }
+}
+
+#pragma mark - UIView
+
+- (void)setNeedsDisplay {
+ [super setNeedsDisplay];
+ _isDirty = YES;
+}
+
+- (void)setNeedsDisplayInRect:(CGRect)rect {
+ [super setNeedsDisplayInRect:rect];
+ _isDirty = YES;
+}
+
+- (void)layoutSubviews {
+ [super layoutSubviews];
+ _glkView.frame = self.bounds;
+}
+
+#pragma mark - GLKViewDelegate
+
+// This method is called when the GLKView's content is dirty and needs to be
+// redrawn. This occurs on main thread.
+- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
+ // The renderer will draw the frame to the framebuffer corresponding to the
+ // one used by `view`.
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+ if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
+ return;
+ }
+ RTCVideoRotation rotation = frame.rotation;
+ if(_rotationOverride != nil) {
+ [_rotationOverride getValue: &rotation];
+ }
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ if (!_nv12TextureCache) {
+ _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
+ }
+ if (_nv12TextureCache) {
+ [_nv12TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:rotation
+ yPlane:_nv12TextureCache.yTexture
+ uvPlane:_nv12TextureCache.uvTexture];
+ [_nv12TextureCache releaseTextures];
+
+ _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
+ }
+ } else {
+ if (!_i420TextureCache) {
+ _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
+ }
+ [_i420TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:rotation
+ yPlane:_i420TextureCache.yTexture
+ uPlane:_i420TextureCache.uTexture
+ vPlane:_i420TextureCache.vTexture];
+
+ _lastDrawnFrameTimeStampNs = self.videoFrame.timeStampNs;
+ }
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)displayLinkTimerDidFire {
+ // Don't render unless video frame have changed or the view content
+ // has explicitly been marked dirty.
+ if (!_isDirty && _lastDrawnFrameTimeStampNs == self.videoFrame.timeStampNs) {
+ return;
+ }
+
+ // Always reset isDirty at this point, even if -[GLKView display]
+ // won't be called in the case the drawable size is empty.
+ _isDirty = NO;
+
+ // Only call -[GLKView display] if the drawable size is
+ // non-empty. Calling display will make the GLKView setup its
+ // render buffer if necessary, but that will fail with error
+ // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
+ if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
+ [_glkView display];
+ }
+}
+
+- (void)setupGL {
+ [self ensureGLContext];
+ glDisable(GL_DITHER);
+ _timer.isPaused = NO;
+}
+
+- (void)teardownGL {
+ self.videoFrame = nil;
+ _timer.isPaused = YES;
+ [_glkView deleteDrawable];
+ [self ensureGLContext];
+ _nv12TextureCache = nil;
+ _i420TextureCache = nil;
+}
+
+- (void)didBecomeActive {
+ [self setupGL];
+}
+
+- (void)willResignActive {
+ [self teardownGL];
+}
+
+- (void)ensureGLContext {
+ NSAssert(_glContext, @"context shouldn't be nil");
+ if ([EAGLContext currentContext] != _glContext) {
+ [EAGLContext setCurrentContext:_glContext];
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
new file mode 100644
index 0000000000..9fdcc5a695
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCOpenGLDefines.h"
+#import "base/RTCVideoFrame.h"
+
+@interface RTCI420TextureCache : NSObject
+
+@property(nonatomic, readonly) GLuint yTexture;
+@property(nonatomic, readonly) GLuint uTexture;
+@property(nonatomic, readonly) GLuint vTexture;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
+
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
new file mode 100644
index 0000000000..5dccd4bf6a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCI420TextureCache.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#import "base/RTCI420Buffer.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#include <vector>
+
+// Two sets of 3 textures are used here, one for each of the Y, U and V planes. Having two sets
+// alleviates CPU blockage in the event that the GPU is asked to render to a texture that is already
+// in use.
+static const GLsizei kNumTextureSets = 2;
+static const GLsizei kNumTexturesPerSet = 3;
+static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;
+
+@implementation RTCI420TextureCache {
+ BOOL _hasUnpackRowLength;
+ GLint _currentTextureSet;
+ // Handles for OpenGL constructs.
+ GLuint _textures[kNumTextures];
+ // Used to create a non-padded plane for GPU upload when we receive padded frames.
+ std::vector<uint8_t> _planeBuffer;
+}
+
+- (GLuint)yTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet];
+}
+
+- (GLuint)uTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet + 1];
+}
+
+- (GLuint)vTexture {
+ return _textures[_currentTextureSet * kNumTexturesPerSet + 2];
+}
+
+- (instancetype)initWithContext:(GlContextType *)context {
+ if (self = [super init]) {
+#if TARGET_OS_IPHONE
+ _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
+#else
+ _hasUnpackRowLength = YES;
+#endif
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+ [self setupTextures];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ glDeleteTextures(kNumTextures, _textures);
+}
+
+- (void)setupTextures {
+ glGenTextures(kNumTextures, _textures);
+ // Set parameters for each of the textures we created.
+ for (GLsizei i = 0; i < kNumTextures; i++) {
+ glBindTexture(GL_TEXTURE_2D, _textures[i]);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ }
+}
+
+- (void)uploadPlane:(const uint8_t *)plane
+ texture:(GLuint)texture
+ width:(size_t)width
+ height:(size_t)height
+ stride:(int32_t)stride {
+ glBindTexture(GL_TEXTURE_2D, texture);
+
+ const uint8_t *uploadPlane = plane;
+ if ((size_t)stride != width) {
+ if (_hasUnpackRowLength) {
+ // GLES3 allows us to specify stride.
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ static_cast<GLsizei>(width),
+ static_cast<GLsizei>(height),
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
+ return;
+ } else {
+ // Make an unpadded copy and upload that instead. Quick profiling showed
+ // that this is faster than uploading row by row using glTexSubImage2D.
+ uint8_t *unpaddedPlane = _planeBuffer.data();
+ for (size_t y = 0; y < height; ++y) {
+ memcpy(unpaddedPlane + y * width, plane + y * stride, width);
+ }
+ uploadPlane = unpaddedPlane;
+ }
+ }
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ RTC_PIXEL_FORMAT,
+ static_cast<GLsizei>(width),
+ static_cast<GLsizei>(height),
+ 0,
+ RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE,
+ uploadPlane);
+}
+
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
+
+ id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
+
+ const int chromaWidth = buffer.chromaWidth;
+ const int chromaHeight = buffer.chromaHeight;
+ if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
+ buffer.strideV != chromaWidth) {
+ _planeBuffer.resize(buffer.width * buffer.height);
+ }
+
+ [self uploadPlane:buffer.dataY
+ texture:self.yTexture
+ width:buffer.width
+ height:buffer.height
+ stride:buffer.strideY];
+
+ [self uploadPlane:buffer.dataU
+ texture:self.uTexture
+ width:chromaWidth
+ height:chromaHeight
+ stride:buffer.strideU];
+
+ [self uploadPlane:buffer.dataV
+ texture:self.vTexture
+ width:chromaWidth
+ height:chromaHeight
+ stride:buffer.strideV];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
new file mode 100644
index 0000000000..c9ee986f88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if !TARGET_OS_IPHONE
+
+#import <AppKit/NSOpenGLView.h>
+
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCNSGLVideoView);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
+
+- (instancetype)initWithFrame:(NSRect)frameRect
+ pixelFormat:(NSOpenGLPixelFormat *)format
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
new file mode 100644
index 0000000000..168c73126f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if !TARGET_OS_IPHONE
+
+#import "RTCNSGLVideoView.h"
+
+#import <AppKit/NSOpenGL.h>
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+
+#import "RTCDefaultShader.h"
+#import "RTCI420TextureCache.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
+()
+ // `videoFrame` is set when we receive a frame from a worker thread and is read
+ // from the display link callback so atomicity is required.
+ @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *
+ videoFrame;
+@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
+
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+ const CVTimeStamp *now,
+ const CVTimeStamp *outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext) {
+ RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
+ (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
+ [view drawFrame];
+ return kCVReturnSuccess;
+}
+
+@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
+ CVDisplayLinkRef _displayLink;
+ RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame;
+ id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize i420TextureCache = _i420TextureCache;
+
+- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
+ return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithFrame:(NSRect)frame
+ pixelFormat:(NSOpenGLPixelFormat *)format
+ shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+ if (self = [super initWithFrame:frame pixelFormat:format]) {
+ _shader = shader;
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+ [self drawFrame];
+}
+
+- (void)reshape {
+ [super reshape];
+ NSRect frame = [self frame];
+ [self ensureGLContext];
+ CGLLockContext([[self openGLContext] CGLContextObj]);
+ glViewport(0, 0, frame.size.width, frame.size.height);
+ CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+ NSOpenGLContext *context = [self openGLContext];
+ [super lockFocus];
+ if ([context view] != self) {
+ [context setView:self];
+ }
+ [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+ [super prepareOpenGL];
+ [self ensureGLContext];
+ glDisable(GL_DITHER);
+ [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+ [self ensureGLContext];
+ self.i420TextureCache = nil;
+ [super clearGLContext];
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [self.delegate videoView:self didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+ RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+ if (!frame || frame == _lastDrawnFrame) {
+ return;
+ }
+ // This method may be called from CVDisplayLink callback which isn't on the
+ // main thread so we have to lock the GL context before drawing.
+ NSOpenGLContext *context = [self openGLContext];
+ CGLLockContext([context CGLContextObj]);
+
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ // Rendering native CVPixelBuffer is not supported on OS X.
+ // TODO(magjed): Add support for NV12 texture cache on OS X.
+ frame = [frame newI420VideoFrame];
+ if (!self.i420TextureCache) {
+ self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
+ }
+ RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
+ if (i420TextureCache) {
+ [i420TextureCache uploadFrameToTextures:frame];
+ [_shader applyShadingForFrameWithWidth:frame.width
+ height:frame.height
+ rotation:frame.rotation
+ yPlane:i420TextureCache.yTexture
+ uPlane:i420TextureCache.uTexture
+ vPlane:i420TextureCache.vTexture];
+ [context flushBuffer];
+ _lastDrawnFrame = frame;
+ }
+ CGLUnlockContext([context CGLContextObj]);
+}
+
+- (void)setupDisplayLink {
+ if (_displayLink) {
+ return;
+ }
+ // Synchronize buffer swaps with vertical refresh rate.
+ GLint swapInt = 1;
+ [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+ // Create display link.
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+ CVDisplayLinkSetOutputCallback(_displayLink,
+ &OnDisplayLinkFired,
+ (__bridge void *)self);
+ // Set the display link for the current renderer.
+ CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+ CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+ CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+ _displayLink, cglContext, cglPixelFormat);
+ CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+ if (!_displayLink) {
+ return;
+ }
+ CVDisplayLinkRelease(_displayLink);
+ _displayLink = NULL;
+}
+
+- (void)ensureGLContext {
+ NSOpenGLContext* context = [self openGLContext];
+ NSAssert(context, @"context shouldn't be nil");
+ if ([NSOpenGLContext currentContext] != context) {
+ [context makeCurrentContext];
+ }
+}
+
+@end
+
+#endif // !TARGET_OS_IPHONE
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
new file mode 100644
index 0000000000..f202b836b5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <GLKit/GLKit.h>
+
+#import "base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCVideoFrame);
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCNV12TextureCache : NSObject
+
+@property(nonatomic, readonly) GLuint yTexture;
+@property(nonatomic, readonly) GLuint uvTexture;
+
+- (instancetype)init NS_UNAVAILABLE;
+- (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
+
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+- (void)releaseTextures;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
new file mode 100644
index 0000000000..a520ac45b4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCNV12TextureCache.h"
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@implementation RTCNV12TextureCache {
+ CVOpenGLESTextureCacheRef _textureCache;
+ CVOpenGLESTextureRef _yTextureRef;
+ CVOpenGLESTextureRef _uvTextureRef;
+}
+
+- (GLuint)yTexture {
+ return CVOpenGLESTextureGetName(_yTextureRef);
+}
+
+- (GLuint)uvTexture {
+ return CVOpenGLESTextureGetName(_uvTextureRef);
+}
+
+- (instancetype)initWithContext:(EAGLContext *)context {
+ if (self = [super init]) {
+ CVReturn ret = CVOpenGLESTextureCacheCreate(
+ kCFAllocatorDefault, NULL,
+#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
+ context,
+#else
+ (__bridge void *)context,
+#endif
+ NULL, &_textureCache);
+ if (ret != kCVReturnSuccess) {
+ self = nil;
+ }
+ }
+ return self;
+}
+
+- (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut
+ pixelBuffer:(CVPixelBufferRef)pixelBuffer
+ planeIndex:(int)planeIndex
+ pixelFormat:(GLenum)pixelFormat {
+ const int width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
+ const int height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
+
+ if (*textureOut) {
+ CFRelease(*textureOut);
+ *textureOut = nil;
+ }
+ CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
+ height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
+ if (ret != kCVReturnSuccess) {
+ if (*textureOut) {
+ CFRelease(*textureOut);
+ *textureOut = nil;
+ }
+ return NO;
+ }
+ NSAssert(CVOpenGLESTextureGetTarget(*textureOut) == GL_TEXTURE_2D,
+ @"Unexpected GLES texture target");
+ glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(*textureOut));
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ return YES;
+}
+
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
+ @"frame must be CVPixelBuffer backed");
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
+ return [self loadTexture:&_yTextureRef
+ pixelBuffer:pixelBuffer
+ planeIndex:0
+ pixelFormat:GL_LUMINANCE] &&
+ [self loadTexture:&_uvTextureRef
+ pixelBuffer:pixelBuffer
+ planeIndex:1
+ pixelFormat:GL_LUMINANCE_ALPHA];
+}
+
+- (void)releaseTextures {
+ if (_uvTextureRef) {
+ CFRelease(_uvTextureRef);
+ _uvTextureRef = nil;
+ }
+ if (_yTextureRef) {
+ CFRelease(_yTextureRef);
+ _yTextureRef = nil;
+ }
+}
+
+- (void)dealloc {
+ [self releaseTextures];
+ if (_textureCache) {
+ CFRelease(_textureCache);
+ _textureCache = nil;
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h
new file mode 100644
index 0000000000..4088535861
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if TARGET_OS_IPHONE
+#define RTC_PIXEL_FORMAT GL_LUMINANCE
+#define SHADER_VERSION
+#define VERTEX_SHADER_IN "attribute"
+#define VERTEX_SHADER_OUT "varying"
+#define FRAGMENT_SHADER_IN "varying"
+#define FRAGMENT_SHADER_OUT
+#define FRAGMENT_SHADER_COLOR "gl_FragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture2D"
+
+@class EAGLContext;
+typedef EAGLContext GlContextType;
+#else
+#define RTC_PIXEL_FORMAT GL_RED
+#define SHADER_VERSION "#version 150\n"
+#define VERTEX_SHADER_IN "in"
+#define VERTEX_SHADER_OUT "out"
+#define FRAGMENT_SHADER_IN "in"
+#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
+#define FRAGMENT_SHADER_COLOR "fragColor"
+#define FRAGMENT_SHADER_TEXTURE "texture"
+
+@class NSOpenGLContext;
+typedef NSOpenGLContext GlContextType;
+#endif
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h
new file mode 100644
index 0000000000..d1b91fb643
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "base/RTCVideoFrame.h"
+
+RTC_EXTERN const char kRTCVertexShaderSource[];
+
+RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source);
+RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader);
+RTC_EXTERN GLuint
+RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]);
+RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer,
+ GLuint* vertexArray);
+RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation);
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm
new file mode 100644
index 0000000000..8eccd7fbec
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCShader.h"
+
+#if TARGET_OS_IPHONE
+#import <OpenGLES/ES3/gl.h>
+#else
+#import <OpenGL/gl3.h>
+#endif
+
+#include <algorithm>
+#include <array>
+#include <memory>
+
+#import "RTCOpenGLDefines.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+// Vertex shader doesn't do anything except pass coordinates through.
+const char kRTCVertexShaderSource[] =
+ SHADER_VERSION
+ VERTEX_SHADER_IN " vec2 position;\n"
+ VERTEX_SHADER_IN " vec2 texcoord;\n"
+ VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
+ "void main() {\n"
+ " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
+ " v_texcoord = texcoord;\n"
+ "}\n";
+
+// Compiles a shader of the given `type` with GLSL source `source` and returns
+// the shader handle or 0 on error.
+GLuint RTCCreateShader(GLenum type, const GLchar *source) {
+ GLuint shader = glCreateShader(type);
+ if (!shader) {
+ return 0;
+ }
+ glShaderSource(shader, 1, &source, NULL);
+ glCompileShader(shader);
+ GLint compileStatus = GL_FALSE;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+ if (compileStatus == GL_FALSE) {
+ GLint logLength = 0;
+ // The null termination character is included in the returned log length.
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
+ if (logLength > 0) {
+ std::unique_ptr<char[]> compileLog(new char[logLength]);
+ // The returned string is null terminated.
+ glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
+ RTC_LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
+ }
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+}
+
+// Links a shader program with the given vertex and fragment shaders and
+// returns the program handle or 0 on error.
+GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) {
+ if (vertexShader == 0 || fragmentShader == 0) {
+ return 0;
+ }
+ GLuint program = glCreateProgram();
+ if (!program) {
+ return 0;
+ }
+ glAttachShader(program, vertexShader);
+ glAttachShader(program, fragmentShader);
+ glLinkProgram(program);
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus == GL_FALSE) {
+ glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+}
+
+// Creates and links a shader program with the given fragment shader source and
+// a plain vertex shader. Returns the program handle or 0 on error.
+GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
+ GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource);
+ RTC_CHECK(vertexShader) << "failed to create vertex shader";
+ GLuint fragmentShader =
+ RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
+ RTC_CHECK(fragmentShader) << "failed to create fragment shader";
+ GLuint program = RTCCreateProgram(vertexShader, fragmentShader);
+ // Shaders are created only to generate program.
+ if (vertexShader) {
+ glDeleteShader(vertexShader);
+ }
+ if (fragmentShader) {
+ glDeleteShader(fragmentShader);
+ }
+
+ // Set vertex shader variables 'position' and 'texcoord' in program.
+ GLint position = glGetAttribLocation(program, "position");
+ GLint texcoord = glGetAttribLocation(program, "texcoord");
+ if (position < 0 || texcoord < 0) {
+ glDeleteProgram(program);
+ return 0;
+ }
+
+ // Read position attribute with size of 2 and stride of 4 beginning at the start of the array. The
+ // last argument indicates offset of data within the vertex buffer.
+ glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
+ glEnableVertexAttribArray(position);
+
+ // Read texcoord attribute with size of 2 and stride of 4 beginning at the first texcoord in the
+ // array. The last argument indicates offset of data within the vertex buffer.
+ glVertexAttribPointer(
+ texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)(2 * sizeof(GLfloat)));
+ glEnableVertexAttribArray(texcoord);
+
+ return program;
+}
+
+BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) {
+#if !TARGET_OS_IPHONE
+ glGenVertexArrays(1, vertexArray);
+ if (*vertexArray == 0) {
+ return NO;
+ }
+ glBindVertexArray(*vertexArray);
+#endif
+ glGenBuffers(1, vertexBuffer);
+ if (*vertexBuffer == 0) {
+ glDeleteVertexArrays(1, vertexArray);
+ return NO;
+ }
+ glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
+ glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
+ return YES;
+}
+
+// Set vertex data to the currently bound vertex buffer.
+void RTCSetVertexData(RTCVideoRotation rotation) {
+ // When modelview and projection matrices are identity (default) the world is
+ // contained in the square around origin with unit size 2. Drawing to these
+ // coordinates is equivalent to drawing to the entire screen. The texture is
+ // stretched over that square using texture coordinates (u, v) that range
+ // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
+ // here because the incoming frame has origin in upper left hand corner but
+ // OpenGL expects origin in bottom left corner.
+ std::array<std::array<GLfloat, 2>, 4> UVCoords = {{
+ {{0, 1}}, // Lower left.
+ {{1, 1}}, // Lower right.
+ {{1, 0}}, // Upper right.
+ {{0, 0}}, // Upper left.
+ }};
+
+ // Rotate the UV coordinates.
+ int rotation_offset;
+ switch (rotation) {
+ case RTCVideoRotation_0:
+ rotation_offset = 0;
+ break;
+ case RTCVideoRotation_90:
+ rotation_offset = 1;
+ break;
+ case RTCVideoRotation_180:
+ rotation_offset = 2;
+ break;
+ case RTCVideoRotation_270:
+ rotation_offset = 3;
+ break;
+ }
+ std::rotate(UVCoords.begin(), UVCoords.begin() + rotation_offset,
+ UVCoords.end());
+
+ const GLfloat gVertices[] = {
+ // X, Y, U, V.
+ -1, -1, UVCoords[0][0], UVCoords[0][1],
+ 1, -1, UVCoords[1][0], UVCoords[1][1],
+ 1, 1, UVCoords[2][0], UVCoords[2][1],
+ -1, 1, UVCoords[3][0], UVCoords[3][1],
+ };
+
+ glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(gVertices), gVertices);
+}
diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
new file mode 100644
index 0000000000..9df30a8fa0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCVideoFrame.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders
+ * used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
+ */
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewShading)<NSObject>
+
+ /** Callback for I420 frames. Each plane is given as a texture. */
+ - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation
+ : (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane
+ : (GLuint)vPlane;
+
+/** Callback for NV12 frames. Each plane is given as a texture. */
+- (void)applyShadingForFrameWithWidth:(int)width
+ height:(int)height
+ rotation:(RTCVideoRotation)rotation
+ yPlane:(GLuint)yPlane
+ uvPlane:(GLuint)uvPlane;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
new file mode 100644
index 0000000000..a0cd8515d1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCodecSpecificInfoH264.h"
+
+#include "modules/video_coding/include/video_codec_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/* Interfaces for converting to/from internal C++ formats. */
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
+()
+
+ - (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
new file mode 100644
index 0000000000..ae3003a115
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCCodecSpecificInfo.h"
+#import "RTCMacros.h"
+
+/** Class for H264 specific config. */
+typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) {
+ RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed
+ RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject <RTC_OBJC_TYPE(RTCCodecSpecificInfo)>
+
+@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
new file mode 100644
index 0000000000..e38ed307b3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCodecSpecificInfoH264+Private.h"
+
+#import "RTCH264ProfileLevelId.h"
+
+// H264 specific settings.
+@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
+
+@synthesize packetizationMode = _packetizationMode;
+
+- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo {
+ webrtc::CodecSpecificInfo codecSpecificInfo;
+ codecSpecificInfo.codecType = webrtc::kVideoCodecH264;
+ codecSpecificInfo.codecSpecific.H264.packetization_mode =
+ (webrtc::H264PacketizationMode)_packetizationMode;
+
+ return codecSpecificInfo;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
new file mode 100644
index 0000000000..de5a9c4684
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This decoder factory includes support for all codecs bundled with WebRTC. If using custom
+ * codecs, create custom implementations of RTCVideoEncoderFactory and
+ * RTCVideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
new file mode 100644
index 0000000000..6e3baa8750
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultVideoDecoderFactory.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoDecoderH264.h"
+#import "api/video_codec/RTCVideoCodecConstants.h"
+#import "api/video_codec/RTCVideoDecoderVP8.h"
+#import "api/video_codec/RTCVideoDecoderVP9.h"
+#import "base/RTCVideoCodecInfo.h"
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+#import "api/video_codec/RTCVideoDecoderAV1.h" // nogncheck
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedHighParams];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedBaselineParams];
+
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
+ constrainedHighInfo,
+ constrainedBaselineInfo,
+ vp8Info,
+ ] mutableCopy];
+
+ if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
+ [result
+ addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+ }
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+ [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+#endif
+
+ return result;
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
+ return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] &&
+ [RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
+ }
+
+#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
+ if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoDecoderAV1) av1Decoder];
+ }
+#endif
+
+ return nil;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
new file mode 100644
index 0000000000..92ab40c95b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderFactory.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** This encoder factory includes support for all codecs bundled with WebRTC. If using custom
+ * codecs, create custom implementations of RTCVideoEncoderFactory and
+ * RTCVideoDecoderFactory.
+ */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
+
+@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec;
+
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
new file mode 100644
index 0000000000..8de55bde4a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDefaultVideoEncoderFactory.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoEncoderH264.h"
+#import "api/video_codec/RTCVideoCodecConstants.h"
+#import "api/video_codec/RTCVideoEncoderVP8.h"
+#import "api/video_codec/RTCVideoEncoderVP9.h"
+#import "base/RTCVideoCodecInfo.h"
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+#import "api/video_codec/RTCVideoEncoderAV1.h" // nogncheck
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory)
+
+@synthesize preferredCodec;
+
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedHighParams];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+ parameters:constrainedBaselineParams];
+
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
+ constrainedHighInfo,
+ constrainedBaselineInfo,
+ vp8Info,
+ ] mutableCopy];
+
+ if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
+ [result
+ addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+ }
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+ [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+#endif
+
+ return result;
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder];
+ } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] &&
+ [RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder];
+ }
+
+#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
+ if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) {
+ return [RTC_OBJC_TYPE(RTCVideoEncoderAV1) av1Encoder];
+ }
+#endif
+
+ return nil;
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
+ [[[self class] supportedCodecs] mutableCopy];
+
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
+ NSUInteger index = [codecs indexOfObject:self.preferredCodec];
+ if (index != NSNotFound) {
+ [orderedCodecs addObject:[codecs objectAtIndex:index]];
+ [codecs removeObjectAtIndex:index];
+ }
+ [orderedCodecs addObjectsFromArray:codecs];
+
+ return [orderedCodecs copy];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
new file mode 100644
index 0000000000..dac7bb5610
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+RTC_OBJC_EXPORT extern NSString *const kRTCVideoCodecH264Name;
+RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedHigh;
+RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedBaseline;
+RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh;
+RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline;
+
+/** H264 Profiles and levels. */
+typedef NS_ENUM(NSUInteger, RTCH264Profile) {
+ RTCH264ProfileConstrainedBaseline,
+ RTCH264ProfileBaseline,
+ RTCH264ProfileMain,
+ RTCH264ProfileConstrainedHigh,
+ RTCH264ProfileHigh,
+};
+
+typedef NS_ENUM(NSUInteger, RTCH264Level) {
+ RTCH264Level1_b = 0,
+ RTCH264Level1 = 10,
+ RTCH264Level1_1 = 11,
+ RTCH264Level1_2 = 12,
+ RTCH264Level1_3 = 13,
+ RTCH264Level2 = 20,
+ RTCH264Level2_1 = 21,
+ RTCH264Level2_2 = 22,
+ RTCH264Level3 = 30,
+ RTCH264Level3_1 = 31,
+ RTCH264Level3_2 = 32,
+ RTCH264Level4 = 40,
+ RTCH264Level4_1 = 41,
+ RTCH264Level4_2 = 42,
+ RTCH264Level5 = 50,
+ RTCH264Level5_1 = 51,
+ RTCH264Level5_2 = 52
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject
+
+@property(nonatomic, readonly) RTCH264Profile profile;
+@property(nonatomic, readonly) RTCH264Level level;
+@property(nonatomic, readonly) NSString *hexString;
+
+- (instancetype)initWithHexString:(NSString *)hexString;
+- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
new file mode 100644
index 0000000000..f0ef3ec232
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCH264ProfileLevelId.h"
+
+#import "helpers/NSString+StdString.h"
+#if defined(WEBRTC_IOS)
+#import "UIDevice+H264Profile.h"
+#endif
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "media/base/media_constants.h"
+
+namespace {
+
+NSString *MaxSupportedProfileLevelConstrainedHigh();
+NSString *MaxSupportedProfileLevelConstrainedBaseline();
+
+} // namespace
+
+NSString *const kRTCVideoCodecH264Name = @(cricket::kH264CodecName);
+NSString *const kRTCLevel31ConstrainedHigh = @"640c1f";
+NSString *const kRTCLevel31ConstrainedBaseline = @"42e01f";
+NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh =
+ MaxSupportedProfileLevelConstrainedHigh();
+NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline =
+ MaxSupportedProfileLevelConstrainedBaseline();
+
+namespace {
+
+#if defined(WEBRTC_IOS)
+
+NSString *MaxSupportedLevelForProfile(webrtc::H264Profile profile) {
+ const absl::optional<webrtc::H264ProfileLevelId> profileLevelId =
+ [UIDevice maxSupportedH264Profile];
+ if (profileLevelId && profileLevelId->profile >= profile) {
+ const absl::optional<std::string> profileString =
+ H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(profile, profileLevelId->level));
+ if (profileString) {
+ return [NSString stringForStdString:*profileString];
+ }
+ }
+ return nil;
+}
+#endif
+
+NSString *MaxSupportedProfileLevelConstrainedBaseline() {
+#if defined(WEBRTC_IOS)
+ NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedBaseline);
+ if (profile != nil) {
+ return profile;
+ }
+#endif
+ return kRTCLevel31ConstrainedBaseline;
+}
+
+NSString *MaxSupportedProfileLevelConstrainedHigh() {
+#if defined(WEBRTC_IOS)
+ NSString *profile = MaxSupportedLevelForProfile(webrtc::H264Profile::kProfileConstrainedHigh);
+ if (profile != nil) {
+ return profile;
+ }
+#endif
+ return kRTCLevel31ConstrainedHigh;
+}
+
+} // namespace
+
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId)
+()
+
+ @property(nonatomic, assign) RTCH264Profile profile;
+@property(nonatomic, assign) RTCH264Level level;
+@property(nonatomic, strong) NSString *hexString;
+
+@end
+
+@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId)
+
+@synthesize profile = _profile;
+@synthesize level = _level;
+@synthesize hexString = _hexString;
+
+- (instancetype)initWithHexString:(NSString *)hexString {
+ if (self = [super init]) {
+ self.hexString = hexString;
+
+ absl::optional<webrtc::H264ProfileLevelId> profile_level_id =
+ webrtc::ParseH264ProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
+ if (profile_level_id.has_value()) {
+ self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
+ self.level = static_cast<RTCH264Level>(profile_level_id->level);
+ }
+ }
+ return self;
+}
+
+- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
+ if (self = [super init]) {
+ self.profile = profile;
+ self.level = level;
+
+ absl::optional<std::string> hex_string =
+ webrtc::H264ProfileLevelIdToString(webrtc::H264ProfileLevelId(
+ static_cast<webrtc::H264Profile>(profile), static_cast<webrtc::H264Level>(level)));
+ self.hexString =
+ [NSString stringWithCString:hex_string.value_or("").c_str() encoding:NSUTF8StringEncoding];
+ }
+ return self;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
new file mode 100644
index 0000000000..88bacbbdfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoderFactory.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
new file mode 100644
index 0000000000..bdae19d687
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoDecoderFactoryH264.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoDecoderH264.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+ NSString *codecName = kRTCVideoCodecH264Name;
+
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedHighParams];
+ [codecs addObject:constrainedHighInfo];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedBaselineParams];
+ [codecs addObject:constrainedBaselineInfo];
+
+ return [codecs copy];
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
new file mode 100644
index 0000000000..a12e4212a7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoDecoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoder)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
new file mode 100644
index 0000000000..09e642bc37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoDecoderH264.h"
+
+#import <VideoToolbox/VideoToolbox.h>
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "helpers.h"
+#import "helpers/scoped_cftyperef.h"
+
+#if defined(WEBRTC_IOS)
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+
+// Struct that we pass to the decoder per frame to decode. We receive it again
+// in the decoder callback.
+struct RTCFrameDecodeParams {
+ RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts) : callback(cb), timestamp(ts) {}
+ RTCVideoDecoderCallback callback;
+ int64_t timestamp;
+};
+
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264)
+() - (void)setError : (OSStatus)error;
+@end
+
+// This is the callback function that VideoToolbox calls when decode is
+// complete.
+void decompressionOutputCallback(void *decoderRef,
+ void *params,
+ OSStatus status,
+ VTDecodeInfoFlags infoFlags,
+ CVImageBufferRef imageBuffer,
+ CMTime timestamp,
+ CMTime duration) {
+ std::unique_ptr<RTCFrameDecodeParams> decodeParams(
+ reinterpret_cast<RTCFrameDecodeParams *>(params));
+ if (status != noErr) {
+ RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder =
+ (__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef;
+ [decoder setError:status];
+ RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
+ return;
+ }
+ // TODO(tkchin): Handle CVO properly.
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+ initWithBuffer:frameBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
+ decodedFrame.timeStamp = decodeParams->timestamp;
+ decodeParams->callback(decodedFrame);
+}
+
+// Decoder.
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) {
+ CMVideoFormatDescriptionRef _videoFormat;
+ CMMemoryPoolRef _memoryPool;
+ VTDecompressionSessionRef _decompressionSession;
+ RTCVideoDecoderCallback _callback;
+ OSStatus _error;
+}
+
+- (instancetype)init {
+ self = [super init];
+ if (self) {
+ _memoryPool = CMMemoryPoolCreate(nil);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ CMMemoryPoolInvalidate(_memoryPool);
+ CFRelease(_memoryPool);
+ [self destroyDecompressionSession];
+ [self setVideoFormat:nullptr];
+}
+
+- (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage
+ missingFrames:(BOOL)missingFrames
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
+ renderTimeMs:(int64_t)renderTimeMs {
+ RTC_DCHECK(inputImage.buffer);
+
+ if (_error != noErr) {
+ RTC_LOG(LS_WARNING) << "Last frame decode failed.";
+ _error = noErr;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
+ rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes,
+ inputImage.buffer.length));
+ if (inputFormat) {
+ // Check if the video format has changed, and reinitialize decoder if
+ // needed.
+ if (!CMFormatDescriptionEqual(inputFormat.get(), _videoFormat)) {
+ [self setVideoFormat:inputFormat.get()];
+ int resetDecompressionSessionError = [self resetDecompressionSession];
+ if (resetDecompressionSessionError != WEBRTC_VIDEO_CODEC_OK) {
+ return resetDecompressionSessionError;
+ }
+ }
+ }
+ if (!_videoFormat) {
+ // We received a frame but we don't have format information so we can't
+ // decode it.
+ // This can happen after backgrounding. We need to wait for the next
+ // sps/pps before we can resume so we request a keyframe by returning an
+ // error.
+ RTC_LOG(LS_WARNING) << "Missing video format. Frame with sps/pps required.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ CMSampleBufferRef sampleBuffer = nullptr;
+ if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes,
+ inputImage.buffer.length,
+ _videoFormat,
+ &sampleBuffer,
+ _memoryPool)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ RTC_DCHECK(sampleBuffer);
+ VTDecodeFrameFlags decodeFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
+ std::unique_ptr<RTCFrameDecodeParams> frameDecodeParams;
+ frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
+ OSStatus status = VTDecompressionSessionDecodeFrame(
+ _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
+#if defined(WEBRTC_IOS)
+ // Re-initialize the decoder if we have an invalid session while the app is
+ // active or decoder malfunctions and retry the decode request.
+ if ((status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr) &&
+ [self resetDecompressionSession] == WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_INFO) << "Failed to decode frame with code: " << status
+ << " retrying decode after decompression session reset";
+ frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp));
+ status = VTDecompressionSessionDecodeFrame(
+ _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr);
+ }
+#endif
+ CFRelease(sampleBuffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)setCallback:(RTCVideoDecoderCallback)callback {
+ _callback = callback;
+}
+
+- (void)setError:(OSStatus)error {
+ _error = error;
+}
+
+- (NSInteger)releaseDecoder {
+ // Need to invalidate the session so that callbacks no longer occur and it
+ // is safe to null out the callback.
+ [self destroyDecompressionSession];
+ [self setVideoFormat:nullptr];
+ _callback = nullptr;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+#pragma mark - Private
+
+- (int)resetDecompressionSession {
+ [self destroyDecompressionSession];
+
+ // Need to wait for the first SPS to initialize decoder.
+ if (!_videoFormat) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ // Set keys for OpenGL and IOSurface compatibility, which makes the encoder
+ // create pixel buffers with GPU backed memory. The intent here is to pass
+ // the pixel buffers directly so we avoid a texture upload later during
+ // rendering. This currently is moot because we are converting back to an
+ // I420 frame after decode, but eventually we will be able to plumb
+ // CVPixelBuffers directly to the renderer.
+ // TODO(tkchin): Maybe only set OpenGL/IOSurface keys if we know that
+ // we can pass CVPixelBuffers as native handles in decoder output.
+ NSDictionary *attributes = @{
+#if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES),
+#elif defined(WEBRTC_IOS)
+ (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES),
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64)
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES),
+#endif
+#if !(TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
+#endif
+ (NSString *)
+ kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+ };
+
+ VTDecompressionOutputCallbackRecord record = {
+ decompressionOutputCallback, (__bridge void *)self,
+ };
+ OSStatus status = VTDecompressionSessionCreate(nullptr,
+ _videoFormat,
+ nullptr,
+ (__bridge CFDictionaryRef)attributes,
+ &record,
+ &_decompressionSession);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create decompression session: " << status;
+ [self destroyDecompressionSession];
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ [self configureDecompressionSession];
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)configureDecompressionSession {
+ RTC_DCHECK(_decompressionSession);
+#if defined(WEBRTC_IOS)
+ VTSessionSetProperty(_decompressionSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
+#endif
+}
+
+- (void)destroyDecompressionSession {
+ if (_decompressionSession) {
+#if defined(WEBRTC_IOS)
+ if ([UIDevice isIOS11OrLater]) {
+ VTDecompressionSessionWaitForAsynchronousFrames(_decompressionSession);
+ }
+#endif
+ VTDecompressionSessionInvalidate(_decompressionSession);
+ CFRelease(_decompressionSession);
+ _decompressionSession = nullptr;
+ }
+}
+
+- (void)setVideoFormat:(CMVideoFormatDescriptionRef)videoFormat {
+ if (_videoFormat == videoFormat) {
+ return;
+ }
+ if (_videoFormat) {
+ CFRelease(_videoFormat);
+ }
+ _videoFormat = videoFormat;
+ if (_videoFormat) {
+ CFRetain(_videoFormat);
+ }
+}
+
+- (NSString *)implementationName {
+ return @"VideoToolbox";
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
new file mode 100644
index 0000000000..45fc4be2ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoEncoderFactory.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
new file mode 100644
index 0000000000..9843849307
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCVideoEncoderFactoryH264.h"
+
+#import "RTCH264ProfileLevelId.h"
+#import "RTCVideoEncoderH264.h"
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
+
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+ NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+ NSString *codecName = kRTCVideoCodecH264Name;
+
+ NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedHighParams];
+ [codecs addObject:constrainedHighInfo];
+
+ NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
+ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
+ @"level-asymmetry-allowed" : @"1",
+ @"packetization-mode" : @"1",
+ };
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+ parameters:constrainedBaselineParams];
+ [codecs addObject:constrainedBaselineInfo];
+
+ return [codecs copy];
+}
+
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
new file mode 100644
index 0000000000..9f4f4c7c8d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCodecInfo.h"
+#import "RTCVideoEncoder.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoder)>
+
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
new file mode 100644
index 0000000000..7dbbfaf019
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
@@ -0,0 +1,819 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#import "RTCVideoEncoderH264.h"
+
+#import <VideoToolbox/VideoToolbox.h>
+#include <vector>
+
+#if defined(WEBRTC_IOS)
+#import "helpers/UIDevice+RTCDevice.h"
+#endif
+#import "RTCCodecSpecificInfoH264.h"
+#import "RTCH264ProfileLevelId.h"
+#import "api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "base/RTCCodecSpecificInfo.h"
+#import "base/RTCI420Buffer.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "helpers.h"
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "common_video/include/bitrate_adjuster.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264)
+()
+
+ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer
+ : (CMSampleBufferRef)sampleBuffer codecSpecificInfo
+ : (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width : (int32_t)width height
+ : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
+ : (RTCVideoRotation)rotation;
+
+@end
+
+namespace { // anonymous namespace
+
+// The ratio between kVTCompressionPropertyKey_DataRateLimits and
+// kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
+// than the average bit rate to avoid undershooting the target.
+const float kLimitToAverageBitRateFactor = 1.5f;
+// These thresholds deviate from the default h264 QP thresholds, as they
+// have been found to work better on devices that support VideoToolbox
+const int kLowH264QpThreshold = 28;
+const int kHighH264QpThreshold = 39;
+
+const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
+
+// Struct that we pass to the encoder per frame to encode. We receive it again
+// in the encoder callback.
+struct RTCFrameEncodeParams {
+ RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e,
+ RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi,
+ int32_t w,
+ int32_t h,
+ int64_t rtms,
+ uint32_t ts,
+ RTCVideoRotation r)
+ : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
+ if (csi) {
+ codecSpecificInfo = csi;
+ } else {
+ codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
+ }
+ }
+
+ RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder;
+ RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo;
+ int32_t width;
+ int32_t height;
+ int64_t render_time_ms;
+ uint32_t timestamp;
+ RTCVideoRotation rotation;
+};
+
+// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
+// encoder. This performs the copy and format conversion.
+// TODO(tkchin): See if encoder will accept i420 frames and compare performance.
+bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
+ CVPixelBufferRef pixelBuffer) {
+ RTC_DCHECK(pixelBuffer);
+ RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
+ RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
+ RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);
+
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ return false;
+ }
+ uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
+ int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+ uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
+ int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+ // Convert I420 to NV12.
+ int ret = libyuv::I420ToNV12(frameBuffer.dataY,
+ frameBuffer.strideY,
+ frameBuffer.dataU,
+ frameBuffer.strideU,
+ frameBuffer.dataV,
+ frameBuffer.strideV,
+ dstY,
+ dstStrideY,
+ dstUV,
+ dstStrideUV,
+ frameBuffer.width,
+ frameBuffer.height);
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ if (ret) {
+ RTC_LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
+ return false;
+ }
+ return true;
+}
+
+CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
+ if (!pixel_buffer_pool) {
+ RTC_LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
+ return nullptr;
+ }
+ CVPixelBufferRef pixel_buffer;
+ CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
+ if (ret != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
+ // We probably want to drop frames here, since failure probably means
+ // that the pool is empty.
+ return nullptr;
+ }
+ return pixel_buffer;
+}
+
+// This is the callback function that VideoToolbox calls when encode is
+// complete. From inspection this happens on its own queue.
+void compressionOutputCallback(void *encoder,
+ void *params,
+ OSStatus status,
+ VTEncodeInfoFlags infoFlags,
+ CMSampleBufferRef sampleBuffer) {
+ if (!params) {
+ // If there are pending callbacks when the encoder is destroyed, this can happen.
+ return;
+ }
+ std::unique_ptr<RTCFrameEncodeParams> encodeParams(
+ reinterpret_cast<RTCFrameEncodeParams *>(params));
+ [encodeParams->encoder frameWasEncoded:status
+ flags:infoFlags
+ sampleBuffer:sampleBuffer
+ codecSpecificInfo:encodeParams->codecSpecificInfo
+ width:encodeParams->width
+ height:encodeParams->height
+ renderTimeMs:encodeParams->render_time_ms
+ timestamp:encodeParams->timestamp
+ rotation:encodeParams->rotation];
+}
+
+// Extract VideoToolbox profile out of the webrtc::SdpVideoFormat. If there is
+// no specific VideoToolbox profile for the specified level, AutoLevel will be
+// returned. The user must initialize the encoder with a resolution and
+// framerate conforming to the selected H264 level regardless.
+CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) {
+ switch (profile_level_id.profile) {
+ case webrtc::H264Profile::kProfileConstrainedBaseline:
+ case webrtc::H264Profile::kProfileBaseline:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_Baseline_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_Baseline_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_Baseline_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_Baseline_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_Baseline_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_Baseline_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_Baseline_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_Baseline_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_Baseline_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_Baseline_AutoLevel;
+ }
+
+ case webrtc::H264Profile::kProfileMain:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_Main_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_Main_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_Main_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_Main_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_Main_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_Main_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_Main_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_Main_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_Main_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_Main_AutoLevel;
+ }
+
+ case webrtc::H264Profile::kProfileConstrainedHigh:
+ case webrtc::H264Profile::kProfileHigh:
+ case webrtc::H264Profile::kProfilePredictiveHigh444:
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return kVTProfileLevel_H264_High_3_0;
+ case webrtc::H264Level::kLevel3_1:
+ return kVTProfileLevel_H264_High_3_1;
+ case webrtc::H264Level::kLevel3_2:
+ return kVTProfileLevel_H264_High_3_2;
+ case webrtc::H264Level::kLevel4:
+ return kVTProfileLevel_H264_High_4_0;
+ case webrtc::H264Level::kLevel4_1:
+ return kVTProfileLevel_H264_High_4_1;
+ case webrtc::H264Level::kLevel4_2:
+ return kVTProfileLevel_H264_High_4_2;
+ case webrtc::H264Level::kLevel5:
+ return kVTProfileLevel_H264_High_5_0;
+ case webrtc::H264Level::kLevel5_1:
+ return kVTProfileLevel_H264_High_5_1;
+ case webrtc::H264Level::kLevel5_2:
+ return kVTProfileLevel_H264_High_5_2;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ return kVTProfileLevel_H264_High_AutoLevel;
+ }
+ }
+}
+
+// The function returns the max allowed sample rate (pixels per second) that
+// can be processed by given encoder with `profile_level_id`.
+// See https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.264-201610-S!!PDF-E&type=items
+// for details.
+NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) {
+ switch (profile_level_id.level) {
+ case webrtc::H264Level::kLevel3:
+ return 10368000;
+ case webrtc::H264Level::kLevel3_1:
+ return 27648000;
+ case webrtc::H264Level::kLevel3_2:
+ return 55296000;
+ case webrtc::H264Level::kLevel4:
+ case webrtc::H264Level::kLevel4_1:
+ return 62914560;
+ case webrtc::H264Level::kLevel4_2:
+ return 133693440;
+ case webrtc::H264Level::kLevel5:
+ return 150994944;
+ case webrtc::H264Level::kLevel5_1:
+ return 251658240;
+ case webrtc::H264Level::kLevel5_2:
+ return 530841600;
+ case webrtc::H264Level::kLevel1:
+ case webrtc::H264Level::kLevel1_b:
+ case webrtc::H264Level::kLevel1_1:
+ case webrtc::H264Level::kLevel1_2:
+ case webrtc::H264Level::kLevel1_3:
+ case webrtc::H264Level::kLevel2:
+ case webrtc::H264Level::kLevel2_1:
+ case webrtc::H264Level::kLevel2_2:
+ // Zero means auto rate setting.
+ return 0;
+ }
+}
+} // namespace
+
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo;
+ std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
+ uint32_t _targetBitrateBps;
+ uint32_t _encoderBitrateBps;
+ uint32_t _encoderFrameRate;
+ uint32_t _maxAllowedFrameRate;
+ RTCH264PacketizationMode _packetizationMode;
+ absl::optional<webrtc::H264ProfileLevelId> _profile_level_id;
+ RTCVideoEncoderCallback _callback;
+ int32_t _width;
+ int32_t _height;
+ VTCompressionSessionRef _compressionSession;
+ CVPixelBufferPoolRef _pixelBufferPool;
+ RTCVideoCodecMode _mode;
+
+ webrtc::H264BitstreamParser _h264BitstreamParser;
+ std::vector<uint8_t> _frameScaleBuffer;
+}
+
+// .5 is set as a minimum to prevent overcompensating for large temporary
+// overshoots. We don't want to degrade video quality too badly.
+// .95 is set to prevent oscillations. When a lower bitrate is set on the
+// encoder than previously set, its output seems to have a brief period of
+// drastically reduced bitrate, so we want to avoid that. In steady state
+// conditions, 0.95 seems to give us better overall bitrate over long periods
+// of time.
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
+ if (self = [super init]) {
+ _codecInfo = codecInfo;
+ _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
+ _packetizationMode = RTCH264PacketizationModeNonInterleaved;
+ _profile_level_id =
+ webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters);
+ RTC_DCHECK(_profile_level_id);
+ RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id));
+ RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self destroyCompressionSession];
+}
+
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
+ numberOfCores:(int)numberOfCores {
+ RTC_DCHECK(settings);
+ RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
+
+ _width = settings.width;
+ _height = settings.height;
+ _mode = settings.mode;
+
+ uint32_t aligned_width = (((_width + 15) >> 4) << 4);
+ uint32_t aligned_height = (((_height + 15) >> 4) << 4);
+ _maxAllowedFrameRate = static_cast<uint32_t>(GetMaxSampleRate(*_profile_level_id) /
+ (aligned_width * aligned_height));
+
+ // We can only set average bitrate on the HW encoder.
+ _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps.
+ _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
+ _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate);
+ if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
+ RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate
+ << " is larger than the "
+ << "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
+ }
+
+ // TODO(tkchin): Try setting payload size via
+ // kVTCompressionPropertyKey_MaxH264SliceBytes.
+
+ return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
+}
+
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+ codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
+ frameTypes:(NSArray<NSNumber *> *)frameTypes {
+ if (!_callback || !_compressionSession) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ BOOL isKeyframeRequired = NO;
+
+ // Get a pixel buffer from the pool and copy frame data over.
+ if ([self resetCompressionSessionIfNeededWithFrame:frame]) {
+ isKeyframeRequired = YES;
+ }
+
+ CVPixelBufferRef pixelBuffer = nullptr;
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ // Native frame buffer
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ if (![rtcPixelBuffer requiresCropping]) {
+ // This pixel buffer might have a higher resolution than what the
+ // compression session is configured to. The compression session can
+ // handle that and will output encoded frames in the configured
+ // resolution regardless of the input pixel buffer resolution.
+ pixelBuffer = rtcPixelBuffer.pixelBuffer;
+ CVBufferRetain(pixelBuffer);
+ } else {
+ // Cropping required, we need to crop and scale to a new pixel buffer.
+ pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
+ if (!pixelBuffer) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
+ int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
+ if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
+ int size =
+ [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
+ _frameScaleBuffer.resize(size);
+ } else {
+ _frameScaleBuffer.clear();
+ }
+ _frameScaleBuffer.shrink_to_fit();
+ if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_frameScaleBuffer.data()]) {
+ CVBufferRelease(pixelBuffer);
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+ }
+
+ if (!pixelBuffer) {
+ // We did not have a native frame buffer
+ RTC_DCHECK_EQ(frame.width, _width);
+ RTC_DCHECK_EQ(frame.height, _height);
+ pixelBuffer = CreatePixelBuffer(_pixelBufferPool);
+ if (!pixelBuffer) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ RTC_DCHECK(pixelBuffer);
+ if (!CopyVideoFrameToNV12PixelBuffer([frame.buffer toI420], pixelBuffer)) {
+ RTC_LOG(LS_ERROR) << "Failed to copy frame data.";
+ CVBufferRelease(pixelBuffer);
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ // Check if we need a keyframe.
+ if (!isKeyframeRequired && frameTypes) {
+ for (NSNumber *frameType in frameTypes) {
+ if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
+ isKeyframeRequired = YES;
+ break;
+ }
+ }
+ }
+
+ CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
+ CFDictionaryRef frameProperties = nullptr;
+ if (isKeyframeRequired) {
+ CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
+ CFTypeRef values[] = {kCFBooleanTrue};
+ frameProperties = CreateCFTypeDictionary(keys, values, 1);
+ }
+
+ std::unique_ptr<RTCFrameEncodeParams> encodeParams;
+ encodeParams.reset(new RTCFrameEncodeParams(self,
+ codecSpecificInfo,
+ _width,
+ _height,
+ frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
+ frame.timeStamp,
+ frame.rotation));
+ encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;
+
+ // Update the bitrate if needed.
+ [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate];
+
+ OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
+ pixelBuffer,
+ presentationTimeStamp,
+ kCMTimeInvalid,
+ frameProperties,
+ encodeParams.release(),
+ nullptr);
+ if (frameProperties) {
+ CFRelease(frameProperties);
+ }
+ if (pixelBuffer) {
+ CVBufferRelease(pixelBuffer);
+ }
+
+ if (status == kVTInvalidSessionErr) {
+ // This error occurs when entering foreground after backgrounding the app.
+ RTC_LOG(LS_ERROR) << "Invalid compression session, resetting.";
+ [self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
+
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+ } else if (status == kVTVideoEncoderMalfunctionErr) {
+ // Sometimes the encoder malfunctions and needs to be restarted.
+ RTC_LOG(LS_ERROR)
+ << "Encountered video encoder malfunction error. Resetting compression session.";
+ [self resetCompressionSessionWithPixelFormat:[self pixelFormatOfFrame:frame]];
+
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+ } else if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)setCallback:(RTCVideoEncoderCallback)callback {
+ _callback = callback;
+}
+
+- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
+ _targetBitrateBps = 1000 * bitrateKbit;
+ _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
+ if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) {
+ RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the "
+ << "maximal allowed frame rate " << _maxAllowedFrameRate << ".";
+ }
+ framerate = MIN(framerate, _maxAllowedFrameRate);
+ [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate];
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (NSInteger)resolutionAlignment {
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ return NO;
+}
+
+- (BOOL)supportsNativeHandle {
+ return YES;
+}
+
+#pragma mark - Private
+
+- (NSInteger)releaseEncoder {
+ // Need to destroy so that the session is invalidated and won't use the
+ // callback anymore. Do not remove callback until the session is invalidated
+ // since async encoder callbacks can occur until invalidation.
+ [self destroyCompressionSession];
+ _callback = nullptr;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ // Use NV12 for non-native frames.
+ if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+ (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+ return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
+ }
+
+ return kNV12PixelFormat;
+}
+
+- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ BOOL resetCompressionSession = NO;
+
+ // If we're capturing native frames in another pixel format than the compression session is
+ // configured with, make sure the compression session is reset using the correct pixel format.
+ OSType framePixelFormat = [self pixelFormatOfFrame:frame];
+
+ if (_compressionSession) {
+ // The pool attribute `kCVPixelBufferPixelFormatTypeKey` can contain either an array of pixel
+ // formats or a single pixel format.
+ NSDictionary *poolAttributes =
+ (__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(_pixelBufferPool);
+ id pixelFormats =
+ [poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey];
+ NSArray<NSNumber *> *compressionSessionPixelFormats = nil;
+ if ([pixelFormats isKindOfClass:[NSArray class]]) {
+ compressionSessionPixelFormats = (NSArray *)pixelFormats;
+ } else if ([pixelFormats isKindOfClass:[NSNumber class]]) {
+ compressionSessionPixelFormats = @[ (NSNumber *)pixelFormats ];
+ }
+
+ if (![compressionSessionPixelFormats
+ containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
+ resetCompressionSession = YES;
+ RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
+ }
+ } else {
+ resetCompressionSession = YES;
+ }
+
+ if (resetCompressionSession) {
+ [self resetCompressionSessionWithPixelFormat:framePixelFormat];
+ }
+ return resetCompressionSession;
+}
+
+- (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat {
+ [self destroyCompressionSession];
+
+ // Set source image buffer attributes. These attributes will be present on
+ // buffers retrieved from the encoder's pixel buffer pool.
+ NSDictionary *sourceAttributes = @{
+#if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR)
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES),
+#elif defined(WEBRTC_IOS)
+ (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES),
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64)
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES),
+#endif
+ (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat),
+ };
+
+ NSDictionary *encoder_specs;
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ // Currently hw accl is supported above 360p on mac, below 360p
+ // the compression session will be created with hw accl disabled.
+ encoder_specs = @{
+ (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES),
+ };
+
+#endif
+ OSStatus status = VTCompressionSessionCreate(
+ nullptr, // use default allocator
+ _width,
+ _height,
+ kCMVideoCodecType_H264,
+ (__bridge CFDictionaryRef)encoder_specs, // use hardware accelerated encoder if available
+ (__bridge CFDictionaryRef)sourceAttributes,
+ nullptr, // use default compressed data allocator
+ compressionOutputCallback,
+ nullptr,
+ &_compressionSession);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create compression session: " << status;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ CFBooleanRef hwaccl_enabled = nullptr;
+ status = VTSessionCopyProperty(_compressionSession,
+ kVTCompressionPropertyKey_UsingHardwareAcceleratedVideoEncoder,
+ nullptr,
+ &hwaccl_enabled);
+ if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
+ RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
+ } else {
+ RTC_LOG(LS_INFO) << "Compression session created with hw accl disabled";
+ }
+#endif
+ [self configureCompressionSession];
+
+ // The pixel buffer pool is dependent on the compression session so if the session is reset, the
+ // pool should be reset as well.
+ _pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+- (void)configureCompressionSession {
+ RTC_DCHECK(_compressionSession);
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
+ SetVTSessionProperty(_compressionSession,
+ kVTCompressionPropertyKey_ProfileLevel,
+ ExtractProfile(*_profile_level_id));
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
+ [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate];
+ // TODO(tkchin): Look at entropy mode and colorspace matrices.
+ // TODO(tkchin): Investigate to see if there's any way to make this work.
+ // May need it to interop with Android. Currently this call just fails.
+ // On inspecting encoder output on iOS8, this value is set to 6.
+ // internal::SetVTSessionProperty(compression_session_,
+ // kVTCompressionPropertyKey_MaxFrameDelayCount,
+ // 1);
+
+ // Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
+ SetVTSessionProperty(
+ _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
+}
+
+- (void)destroyCompressionSession {
+ if (_compressionSession) {
+ VTCompressionSessionInvalidate(_compressionSession);
+ CFRelease(_compressionSession);
+ _compressionSession = nullptr;
+ _pixelBufferPool = nullptr;
+ }
+}
+
+- (NSString *)implementationName {
+ return @"VideoToolbox";
+}
+
+- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate {
+ if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) {
+ [self setEncoderBitrateBps:bitrateBps frameRate:frameRate];
+ }
+}
+
+- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate {
+ if (_compressionSession) {
+ SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);
+
+ // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection.
+ if (_maxAllowedFrameRate > 0) {
+ SetVTSessionProperty(
+ _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate);
+ }
+
+ // TODO(tkchin): Add a helper method to set array value.
+ int64_t dataLimitBytesPerSecondValue =
+ static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
+ CFNumberRef bytesPerSecond =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
+ int64_t oneSecondValue = 1;
+ CFNumberRef oneSecond =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
+ const void *nums[2] = {bytesPerSecond, oneSecond};
+ CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
+ OSStatus status = VTSessionSetProperty(
+ _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
+ if (bytesPerSecond) {
+ CFRelease(bytesPerSecond);
+ }
+ if (oneSecond) {
+ CFRelease(oneSecond);
+ }
+ if (dataRateLimits) {
+ CFRelease(dataRateLimits);
+ }
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status;
+ }
+
+ _encoderBitrateBps = bitrateBps;
+ _encoderFrameRate = frameRate;
+ }
+}
+
+- (void)frameWasEncoded:(OSStatus)status
+ flags:(VTEncodeInfoFlags)infoFlags
+ sampleBuffer:(CMSampleBufferRef)sampleBuffer
+ codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
+ width:(int32_t)width
+ height:(int32_t)height
+ renderTimeMs:(int64_t)renderTimeMs
+ timestamp:(uint32_t)timestamp
+ rotation:(RTCVideoRotation)rotation {
+ RTCVideoEncoderCallback callback = _callback;
+ if (!callback) {
+ return;
+ }
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "H264 encode failed with code: " << status;
+ return;
+ }
+ if (infoFlags & kVTEncodeInfo_FrameDropped) {
+ RTC_LOG(LS_INFO) << "H264 encode dropped frame.";
+ return;
+ }
+
+ BOOL isKeyframe = NO;
+ CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
+ if (attachments != nullptr && CFArrayGetCount(attachments)) {
+ CFDictionaryRef attachment =
+ static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
+ isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
+ }
+
+ if (isKeyframe) {
+ RTC_LOG(LS_INFO) << "Generated keyframe";
+ }
+
+ __block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
+ if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
+ return;
+ }
+
+ RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
+ // This assumes ownership of `buffer` and is responsible for freeing it when done.
+ frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
+ length:buffer->size()
+ deallocator:^(void *bytes, NSUInteger size) {
+ buffer.reset();
+ }];
+ frame.encodedWidth = width;
+ frame.encodedHeight = height;
+ frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta;
+ frame.captureTimeMs = renderTimeMs;
+ frame.timeStamp = timestamp;
+ frame.rotation = rotation;
+ frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
+ RTCVideoContentTypeUnspecified;
+ frame.flags = webrtc::VideoSendTiming::kInvalid;
+
+ _h264BitstreamParser.ParseBitstream(*buffer);
+ frame.qp = @(_h264BitstreamParser.GetLastSliceQp().value_or(-1));
+
+ BOOL res = callback(frame, codecSpecificInfo);
+ if (!res) {
+ RTC_LOG(LS_ERROR) << "Encode callback failed";
+ return;
+ }
+ _bitrateAdjuster->Update(frame.buffer.length);
+}
+
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
+ return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc]
+ initWithThresholdsLow:kLowH264QpThreshold
+ high:kHighH264QpThreshold];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h
new file mode 100644
index 0000000000..a51debb9fa
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#include "api/video_codecs/h264_profile_level_id.h"
+
+@interface UIDevice (H264Profile)
+
++ (absl::optional<webrtc::H264ProfileLevelId>)maxSupportedH264Profile;
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm
new file mode 100644
index 0000000000..0ef6a8d77c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/UIDevice+H264Profile.mm
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIDevice+H264Profile.h"
+#import "helpers/UIDevice+RTCDevice.h"
+
+#include <algorithm>
+
+namespace {
+
+using namespace webrtc;
+
+struct SupportedH264Profile {
+ const RTCDeviceType deviceType;
+ const H264ProfileLevelId profile;
+};
+
+constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
+ // iPhones with at least iOS 9
+ {RTCDeviceTypeIPhone13ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP848
+ {RTCDeviceTypeIPhone13Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP852
+ {RTCDeviceTypeIPhone13,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP851
+ {RTCDeviceTypeIPhone13Mini,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP847
+ {RTCDeviceTypeIPhoneSE2Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP820
+ {RTCDeviceTypeIPhone12ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP832
+ {RTCDeviceTypeIPhone12Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP831
+ {RTCDeviceTypeIPhone12,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP830
+ {RTCDeviceTypeIPhone12Mini,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP829
+ {RTCDeviceTypeIPhone11ProMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP806
+ {RTCDeviceTypeIPhone11Pro,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP805
+ {RTCDeviceTypeIPhone11,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP804
+ {RTCDeviceTypeIPhoneXS,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP779
+ {RTCDeviceTypeIPhoneXSMax,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP780
+ {RTCDeviceTypeIPhoneXR,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP781
+ {RTCDeviceTypeIPhoneX,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP770
+ {RTCDeviceTypeIPhone8,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP767
+ {RTCDeviceTypeIPhone8Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP768
+ {RTCDeviceTypeIPhone7,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_1}}, // https://support.apple.com/kb/SP743
+ {RTCDeviceTypeIPhone7Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_1}}, // https://support.apple.com/kb/SP744
+ {RTCDeviceTypeIPhoneSE,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP738
+ {RTCDeviceTypeIPhone6S,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP726
+ {RTCDeviceTypeIPhone6SPlus,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP727
+ {RTCDeviceTypeIPhone6,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP705
+ {RTCDeviceTypeIPhone6Plus,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP706
+ {RTCDeviceTypeIPhone5SGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP685
+ {RTCDeviceTypeIPhone5SGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP685
+ {RTCDeviceTypeIPhone5GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP655
+ {RTCDeviceTypeIPhone5GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP655
+ {RTCDeviceTypeIPhone5CGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP684
+ {RTCDeviceTypeIPhone5CGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP684
+ {RTCDeviceTypeIPhone4S,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP643
+
+ // iPods with at least iOS 9
+ {RTCDeviceTypeIPodTouch7G,
+ {H264Profile::kProfileMain, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP796
+ {RTCDeviceTypeIPodTouch6G,
+ {H264Profile::kProfileMain, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP720
+ {RTCDeviceTypeIPodTouch5G,
+ {H264Profile::kProfileMain, H264Level::kLevel3_1}}, // https://support.apple.com/kb/SP657
+
+ // iPads with at least iOS 9
+ {RTCDeviceTypeIPadAir4Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP828
+ {RTCDeviceTypeIPad8,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP822
+ {RTCDeviceTypeIPadPro4Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP815
+ {RTCDeviceTypeIPadPro4Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP814
+ {RTCDeviceTypeIPadAir3Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP787
+ {RTCDeviceTypeIPadMini5Gen,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP788
+ {RTCDeviceTypeIPadPro3Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP785
+ {RTCDeviceTypeIPadPro3Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP784
+ {RTCDeviceTypeIPad7Gen10Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP807
+ {RTCDeviceTypeIPad2Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPad2Wifi2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP622
+ {RTCDeviceTypeIPadMiniWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPadMiniGSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPadMiniGSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP661
+ {RTCDeviceTypeIPad3Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad3GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad3GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP647
+ {RTCDeviceTypeIPad4Wifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad4GSM,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad4GSM_CDMA,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_1}}, // https://support.apple.com/kb/SP662
+ {RTCDeviceTypeIPad5,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP751
+ {RTCDeviceTypeIPad6,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP774
+ {RTCDeviceTypeIPadAirWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAirCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAirWifiCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP692
+ {RTCDeviceTypeIPadAir2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP708
+ {RTCDeviceTypeIPadMini2GWifi,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini2GCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini2GWifiCellular,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP693
+ {RTCDeviceTypeIPadMini3,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP709
+ {RTCDeviceTypeIPadMini4,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP725
+ {RTCDeviceTypeIPadPro9Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP739
+ {RTCDeviceTypeIPadPro12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/sp723
+ {RTCDeviceTypeIPadPro12Inch2,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP761
+ {RTCDeviceTypeIPadPro10Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP762
+ {RTCDeviceTypeIPadMini6,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP850
+ {RTCDeviceTypeIPad9,
+ {H264Profile::kProfileHigh, H264Level::kLevel4_2}}, // https://support.apple.com/kb/SP849
+ {RTCDeviceTypeIPadPro5Gen12Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP844
+ {RTCDeviceTypeIPadPro5Gen11Inch,
+ {H264Profile::kProfileHigh, H264Level::kLevel5_2}}, // https://support.apple.com/kb/SP843
+};
+
+absl::optional<H264ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
+ const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
+ std::end(kH264MaxSupportedProfiles),
+ [deviceType](const SupportedH264Profile& supportedProfile) {
+ return supportedProfile.deviceType == deviceType;
+ });
+ if (result != std::end(kH264MaxSupportedProfiles)) {
+ return result->profile;
+ }
+ return absl::nullopt;
+}
+
+} // namespace
+
+@implementation UIDevice (H264Profile)
+
++ (absl::optional<webrtc::H264ProfileLevelId>)maxSupportedH264Profile {
+ return FindMaxSupportedProfileForDevice([self deviceType]);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc
new file mode 100644
index 0000000000..ac957f1b49
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "helpers.h"
+
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+// Copies characters from a CFStringRef into a std::string.
+std::string CFStringToString(const CFStringRef cf_string) {
+ RTC_DCHECK(cf_string);
+ std::string std_string;
+ // Get the size needed for UTF8 plus terminating character.
+ size_t buffer_size =
+ CFStringGetMaximumSizeForEncoding(CFStringGetLength(cf_string),
+ kCFStringEncodingUTF8) +
+ 1;
+ std::unique_ptr<char[]> buffer(new char[buffer_size]);
+ if (CFStringGetCString(cf_string, buffer.get(), buffer_size,
+ kCFStringEncodingUTF8)) {
+ // Copy over the characters.
+ std_string.assign(buffer.get());
+ }
+ return std_string;
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ int32_t value) {
+ CFNumberRef cfNum =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
+ OSStatus status = VTSessionSetProperty(session, key, cfNum);
+ CFRelease(cfNum);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ uint32_t value) {
+ int64_t value_64 = value;
+ CFNumberRef cfNum =
+ CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &value_64);
+ OSStatus status = VTSessionSetProperty(session, key, cfNum);
+ CFRelease(cfNum);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value) {
+ CFBooleanRef cf_bool = (value) ? kCFBooleanTrue : kCFBooleanFalse;
+ OSStatus status = VTSessionSetProperty(session, key, cf_bool);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << value << ": " << status;
+ }
+}
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ CFStringRef value) {
+ OSStatus status = VTSessionSetProperty(session, key, value);
+ if (status != noErr) {
+ std::string key_string = CFStringToString(key);
+ std::string val_string = CFStringToString(value);
+ RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+ << " to " << val_string << ": " << status;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h
new file mode 100644
index 0000000000..7c9ef1cd87
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/helpers.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
+#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <VideoToolbox/VideoToolbox.h>
+#include <string>
+
+// Convenience function for creating a dictionary.
+inline CFDictionaryRef CreateCFTypeDictionary(CFTypeRef* keys,
+ CFTypeRef* values,
+ size_t size) {
+ return CFDictionaryCreate(kCFAllocatorDefault, keys, values, size,
+ &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks);
+}
+
+// Copies characters from a CFStringRef into a std::string.
+std::string CFStringToString(CFStringRef cf_string);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, int32_t value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ uint32_t value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value);
+
+// Convenience function for setting a VT property.
+void SetVTSessionProperty(VTSessionRef session,
+ CFStringRef key,
+ CFStringRef value);
+
+#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_HELPERS_H_
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc
new file mode 100644
index 0000000000..b7330e1f9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.cc
@@ -0,0 +1,327 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "sdk/objc/components/video_codec/nalu_rewriter.h"
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <memory>
+#include <vector>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+using H264::kAud;
+using H264::kSps;
+using H264::NaluIndex;
+using H264::NaluType;
+using H264::ParseNaluType;
+
+const char kAnnexBHeaderBytes[4] = {0, 0, 0, 1};
+const size_t kAvccHeaderByteSize = sizeof(uint32_t);
+
+bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer,
+ bool is_keyframe,
+ rtc::Buffer* annexb_buffer) {
+ RTC_DCHECK(avcc_sample_buffer);
+
+ // Get format description from the sample buffer.
+ CMVideoFormatDescriptionRef description =
+ CMSampleBufferGetFormatDescription(avcc_sample_buffer);
+ if (description == nullptr) {
+ RTC_LOG(LS_ERROR) << "Failed to get sample buffer's description.";
+ return false;
+ }
+
+ // Get parameter set information.
+ int nalu_header_size = 0;
+ size_t param_set_count = 0;
+ OSStatus status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+ description, 0, nullptr, nullptr, &param_set_count, &nalu_header_size);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
+ return false;
+ }
+ RTC_CHECK_EQ(nalu_header_size, kAvccHeaderByteSize);
+ RTC_DCHECK_EQ(param_set_count, 2);
+
+ // Truncate any previous data in the buffer without changing its capacity.
+ annexb_buffer->SetSize(0);
+
+ // Place all parameter sets at the front of buffer.
+ if (is_keyframe) {
+ size_t param_set_size = 0;
+ const uint8_t* param_set = nullptr;
+ for (size_t i = 0; i < param_set_count; ++i) {
+ status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
+ description, i, &param_set, &param_set_size, nullptr, nullptr);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
+ return false;
+ }
+ // Update buffer.
+ annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
+ annexb_buffer->AppendData(reinterpret_cast<const char*>(param_set),
+ param_set_size);
+ }
+ }
+
+ // Get block buffer from the sample buffer.
+ CMBlockBufferRef block_buffer =
+ CMSampleBufferGetDataBuffer(avcc_sample_buffer);
+ if (block_buffer == nullptr) {
+ RTC_LOG(LS_ERROR) << "Failed to get sample buffer's block buffer.";
+ return false;
+ }
+ CMBlockBufferRef contiguous_buffer = nullptr;
+ // Make sure block buffer is contiguous.
+ if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
+ status = CMBlockBufferCreateContiguous(
+ nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+ << status;
+ return false;
+ }
+ } else {
+ contiguous_buffer = block_buffer;
+ // Retain to make cleanup easier.
+ CFRetain(contiguous_buffer);
+ block_buffer = nullptr;
+ }
+
+ // Now copy the actual data.
+ char* data_ptr = nullptr;
+ size_t block_buffer_size = CMBlockBufferGetDataLength(contiguous_buffer);
+ status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, nullptr,
+ &data_ptr);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get block buffer data.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ size_t bytes_remaining = block_buffer_size;
+ while (bytes_remaining > 0) {
+ // The size type here must match `nalu_header_size`, we expect 4 bytes.
+ // Read the length of the next packet of data. Must convert from big endian
+ // to host endian.
+ RTC_DCHECK_GE(bytes_remaining, (size_t)nalu_header_size);
+ uint32_t* uint32_data_ptr = reinterpret_cast<uint32_t*>(data_ptr);
+ uint32_t packet_size = CFSwapInt32BigToHost(*uint32_data_ptr);
+ // Update buffer.
+ annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes));
+ annexb_buffer->AppendData(data_ptr + nalu_header_size, packet_size);
+
+ size_t bytes_written = packet_size + sizeof(kAnnexBHeaderBytes);
+ bytes_remaining -= bytes_written;
+ data_ptr += bytes_written;
+ }
+ RTC_DCHECK_EQ(bytes_remaining, (size_t)0);
+
+ CFRelease(contiguous_buffer);
+ return true;
+}
+
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer,
+ CMMemoryPoolRef memory_pool) {
+ RTC_DCHECK(annexb_buffer);
+ RTC_DCHECK(out_sample_buffer);
+ RTC_DCHECK(video_format);
+ *out_sample_buffer = nullptr;
+
+ AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
+ if (reader.SeekToNextNaluOfType(kSps)) {
+ // Buffer contains an SPS NALU - skip it and the following PPS
+ const uint8_t* data;
+ size_t data_len;
+ if (!reader.ReadNalu(&data, &data_len)) {
+ RTC_LOG(LS_ERROR) << "Failed to read SPS";
+ return false;
+ }
+ if (!reader.ReadNalu(&data, &data_len)) {
+ RTC_LOG(LS_ERROR) << "Failed to read PPS";
+ return false;
+ }
+ } else {
+ // No SPS NALU - start reading from the first NALU in the buffer
+ reader.SeekToStart();
+ }
+
+ // Allocate memory as a block buffer.
+ CMBlockBufferRef block_buffer = nullptr;
+ CFAllocatorRef block_allocator = CMMemoryPoolGetAllocator(memory_pool);
+ OSStatus status = CMBlockBufferCreateWithMemoryBlock(
+ kCFAllocatorDefault, nullptr, reader.BytesRemaining(), block_allocator,
+ nullptr, 0, reader.BytesRemaining(), kCMBlockBufferAssureMemoryNowFlag,
+ &block_buffer);
+ if (status != kCMBlockBufferNoErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create block buffer.";
+ return false;
+ }
+
+ // Make sure block buffer is contiguous.
+ CMBlockBufferRef contiguous_buffer = nullptr;
+ if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) {
+ status = CMBlockBufferCreateContiguous(kCFAllocatorDefault, block_buffer,
+ block_allocator, nullptr, 0, 0, 0,
+ &contiguous_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+ << status;
+ CFRelease(block_buffer);
+ return false;
+ }
+ } else {
+ contiguous_buffer = block_buffer;
+ block_buffer = nullptr;
+ }
+
+ // Get a raw pointer into allocated memory.
+ size_t block_buffer_size = 0;
+ char* data_ptr = nullptr;
+ status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr,
+ &block_buffer_size, &data_ptr);
+ if (status != kCMBlockBufferNoErr) {
+ RTC_LOG(LS_ERROR) << "Failed to get block buffer data pointer.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ RTC_DCHECK(block_buffer_size == reader.BytesRemaining());
+
+ // Write Avcc NALUs into block buffer memory.
+ AvccBufferWriter writer(reinterpret_cast<uint8_t*>(data_ptr),
+ block_buffer_size);
+ while (reader.BytesRemaining() > 0) {
+ const uint8_t* nalu_data_ptr = nullptr;
+ size_t nalu_data_size = 0;
+ if (reader.ReadNalu(&nalu_data_ptr, &nalu_data_size)) {
+ writer.WriteNalu(nalu_data_ptr, nalu_data_size);
+ }
+ }
+
+ // Create sample buffer.
+ status = CMSampleBufferCreate(kCFAllocatorDefault, contiguous_buffer, true,
+ nullptr, nullptr, video_format, 1, 0, nullptr,
+ 0, nullptr, out_sample_buffer);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create sample buffer.";
+ CFRelease(contiguous_buffer);
+ return false;
+ }
+ CFRelease(contiguous_buffer);
+ return true;
+}
+
+CMVideoFormatDescriptionRef CreateVideoFormatDescription(
+ const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size) {
+ const uint8_t* param_set_ptrs[2] = {};
+ size_t param_set_sizes[2] = {};
+ AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size);
+  // Skip everything before the SPS, then read the SPS and PPS
+ if (!reader.SeekToNextNaluOfType(kSps)) {
+ return nullptr;
+ }
+ if (!reader.ReadNalu(&param_set_ptrs[0], &param_set_sizes[0])) {
+ RTC_LOG(LS_ERROR) << "Failed to read SPS";
+ return nullptr;
+ }
+ if (!reader.ReadNalu(&param_set_ptrs[1], &param_set_sizes[1])) {
+ RTC_LOG(LS_ERROR) << "Failed to read PPS";
+ return nullptr;
+ }
+
+ // Parse the SPS and PPS into a CMVideoFormatDescription.
+ CMVideoFormatDescriptionRef description = nullptr;
+ OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
+ kCFAllocatorDefault, 2, param_set_ptrs, param_set_sizes, 4, &description);
+ if (status != noErr) {
+ RTC_LOG(LS_ERROR) << "Failed to create video format description.";
+ return nullptr;
+ }
+ return description;
+}
+
+AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer,
+ size_t length)
+ : start_(annexb_buffer), length_(length) {
+ RTC_DCHECK(annexb_buffer);
+ offsets_ = H264::FindNaluIndices(annexb_buffer, length);
+ offset_ = offsets_.begin();
+}
+
+AnnexBBufferReader::~AnnexBBufferReader() = default;
+
+bool AnnexBBufferReader::ReadNalu(const uint8_t** out_nalu,
+ size_t* out_length) {
+ RTC_DCHECK(out_nalu);
+ RTC_DCHECK(out_length);
+ *out_nalu = nullptr;
+ *out_length = 0;
+
+ if (offset_ == offsets_.end()) {
+ return false;
+ }
+ *out_nalu = start_ + offset_->payload_start_offset;
+ *out_length = offset_->payload_size;
+ ++offset_;
+ return true;
+}
+
+size_t AnnexBBufferReader::BytesRemaining() const {
+ if (offset_ == offsets_.end()) {
+ return 0;
+ }
+ return length_ - offset_->start_offset;
+}
+
+void AnnexBBufferReader::SeekToStart() {
+ offset_ = offsets_.begin();
+}
+
+bool AnnexBBufferReader::SeekToNextNaluOfType(NaluType type) {
+ for (; offset_ != offsets_.end(); ++offset_) {
+ if (offset_->payload_size < 1)
+ continue;
+ if (ParseNaluType(*(start_ + offset_->payload_start_offset)) == type)
+ return true;
+ }
+ return false;
+}
+AvccBufferWriter::AvccBufferWriter(uint8_t* const avcc_buffer, size_t length)
+ : start_(avcc_buffer), offset_(0), length_(length) {
+ RTC_DCHECK(avcc_buffer);
+}
+
+bool AvccBufferWriter::WriteNalu(const uint8_t* data, size_t data_size) {
+ // Check if we can write this length of data.
+ if (data_size + kAvccHeaderByteSize > BytesRemaining()) {
+ return false;
+ }
+ // Write length header, which needs to be big endian.
+ uint32_t big_endian_length = CFSwapInt32HostToBig(data_size);
+ memcpy(start_ + offset_, &big_endian_length, sizeof(big_endian_length));
+ offset_ += sizeof(big_endian_length);
+ // Write data.
+ memcpy(start_ + offset_, data, data_size);
+ offset_ += data_size;
+ return true;
+}
+
+size_t AvccBufferWriter::BytesRemaining() const {
+ return length_ - offset_;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h
new file mode 100644
index 0000000000..c6474971e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_codec/nalu_rewriter.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
+#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
+
+#include "modules/video_coding/codecs/h264/include/h264.h"
+
+#include <CoreMedia/CoreMedia.h>
+#include <vector>
+
+#include "common_video/h264/h264_common.h"
+#include "rtc_base/buffer.h"
+
+using webrtc::H264::NaluIndex;
+
+namespace webrtc {
+
+// Converts a sample buffer emitted from the VideoToolbox encoder into a buffer
+// suitable for RTP. The sample buffer is in avcc format whereas the rtp buffer
+// needs to be in Annex B format. Data is written directly to `annexb_buffer`.
+bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer,
+ bool is_keyframe,
+ rtc::Buffer* annexb_buffer);
+
+// Converts a buffer received from RTP into a sample buffer suitable for the
+// VideoToolbox decoder. The RTP buffer is in annex b format whereas the sample
+// buffer is in avcc format.
+// If `is_keyframe` is true then `video_format` is ignored since the format will
+// be read from the buffer. Otherwise `video_format` must be provided.
+// Caller is responsible for releasing the created sample buffer.
+bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size,
+ CMVideoFormatDescriptionRef video_format,
+ CMSampleBufferRef* out_sample_buffer,
+ CMMemoryPoolRef memory_pool);
+
+// Returns a video format description created from the sps/pps information in
+// the Annex B buffer. If there is no such information, nullptr is returned.
+// The caller is responsible for releasing the description.
+CMVideoFormatDescriptionRef CreateVideoFormatDescription(
+ const uint8_t* annexb_buffer,
+ size_t annexb_buffer_size);
+
+// Helper class for reading NALUs from an RTP Annex B buffer.
+class AnnexBBufferReader final {
+ public:
+ AnnexBBufferReader(const uint8_t* annexb_buffer, size_t length);
+ ~AnnexBBufferReader();
+ AnnexBBufferReader(const AnnexBBufferReader& other) = delete;
+ void operator=(const AnnexBBufferReader& other) = delete;
+
+ // Returns a pointer to the beginning of the next NALU slice without the
+ // header bytes and its length. Returns false if no more slices remain.
+ bool ReadNalu(const uint8_t** out_nalu, size_t* out_length);
+
+ // Returns the number of unread NALU bytes, including the size of the header.
+ // If the buffer has no remaining NALUs this will return zero.
+ size_t BytesRemaining() const;
+
+ // Reset the reader to start reading from the first NALU
+ void SeekToStart();
+
+ // Seek to the next position that holds a NALU of the desired type,
+ // or the end if no such NALU is found.
+ // Return true if a NALU of the desired type is found, false if we
+ // reached the end instead
+ bool SeekToNextNaluOfType(H264::NaluType type);
+
+ private:
+  // Returns the next offset that contains NALU data.
+ size_t FindNextNaluHeader(const uint8_t* start,
+ size_t length,
+ size_t offset) const;
+
+ const uint8_t* const start_;
+ std::vector<NaluIndex> offsets_;
+ std::vector<NaluIndex>::iterator offset_;
+ const size_t length_;
+};
+
+// Helper class for writing NALUs using avcc format into a buffer.
+class AvccBufferWriter final {
+ public:
+ AvccBufferWriter(uint8_t* const avcc_buffer, size_t length);
+ ~AvccBufferWriter() {}
+ AvccBufferWriter(const AvccBufferWriter& other) = delete;
+ void operator=(const AvccBufferWriter& other) = delete;
+
+ // Writes the data slice into the buffer. Returns false if there isn't
+ // enough space left.
+ bool WriteNalu(const uint8_t* data, size_t data_size);
+
+ // Returns the unused bytes in the buffer.
+ size_t BytesRemaining() const;
+
+ private:
+ uint8_t* const start_;
+ size_t offset_;
+ const size_t length_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEOTOOLBOX_NALU_REWRITER_H_
diff --git a/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
new file mode 100644
index 0000000000..664d9bb904
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrameBuffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** RTCVideoFrameBuffer containing a CVPixelBufferRef */
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject <RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
+
+@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
+@property(nonatomic, readonly) int cropX;
+@property(nonatomic, readonly) int cropY;
+@property(nonatomic, readonly) int cropWidth;
+@property(nonatomic, readonly) int cropHeight;
+
++ (NSSet<NSNumber *> *)supportedPixelFormats;
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer;
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ adaptedWidth:(int)adaptedWidth
+ adaptedHeight:(int)adaptedHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY;
+
+- (BOOL)requiresCropping;
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
+
+/** The minimum size of the `tmpBuffer` must be the number of bytes returned from the
+ * bufferSizeForCroppingAndScalingToWidth:height: method.
+ * If that size is 0, the `tmpBuffer` may be nil.
+ */
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+ withTempBuffer:(nullable uint8_t *)tmpBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
new file mode 100644
index 0000000000..20b97d02b7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCVPixelBuffer.h"
+
+#import "api/video_frame_buffer/RTCNativeMutableI420Buffer.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "libyuv/include/libyuv.h"
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#import <VideoToolbox/VideoToolbox.h>
+#endif
+
+@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) {
+ int _width;
+ int _height;
+ int _bufferWidth;
+ int _bufferHeight;
+ int _cropWidth;
+ int _cropHeight;
+}
+
+@synthesize pixelBuffer = _pixelBuffer;
+@synthesize cropX = _cropX;
+@synthesize cropY = _cropY;
+@synthesize cropWidth = _cropWidth;
+@synthesize cropHeight = _cropHeight;
+
++ (NSSet<NSNumber*>*)supportedPixelFormats {
+ return [NSSet setWithObjects:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+ @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+ @(kCVPixelFormatType_32BGRA),
+ @(kCVPixelFormatType_32ARGB),
+ nil];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
+ return [self initWithPixelBuffer:pixelBuffer
+ adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
+ adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
+ cropWidth:CVPixelBufferGetWidth(pixelBuffer)
+ cropHeight:CVPixelBufferGetHeight(pixelBuffer)
+ cropX:0
+ cropY:0];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ adaptedWidth:(int)adaptedWidth
+ adaptedHeight:(int)adaptedHeight
+ cropWidth:(int)cropWidth
+ cropHeight:(int)cropHeight
+ cropX:(int)cropX
+ cropY:(int)cropY {
+ if (self = [super init]) {
+ _width = adaptedWidth;
+ _height = adaptedHeight;
+ _pixelBuffer = pixelBuffer;
+ _bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
+ _bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
+ _cropWidth = cropWidth;
+ _cropHeight = cropHeight;
+ // Can only crop at even pixels.
+ _cropX = cropX & ~1;
+ _cropY = cropY & ~1;
+ CVBufferRetain(_pixelBuffer);
+ }
+
+ return self;
+}
+
+- (void)dealloc {
+ CVBufferRelease(_pixelBuffer);
+}
+
+- (int)width {
+ return _width;
+}
+
+- (int)height {
+ return _height;
+}
+
+- (BOOL)requiresCropping {
+ return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
+}
+
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
+ return _cropWidth != width || _cropHeight != height;
+}
+
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
+ const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+ switch (srcPixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ int srcChromaWidth = (_cropWidth + 1) / 2;
+ int srcChromaHeight = (_cropHeight + 1) / 2;
+ int dstChromaWidth = (width + 1) / 2;
+ int dstChromaHeight = (height + 1) / 2;
+
+ return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ return 0; // Scaling RGBA frames does not require a temporary buffer.
+ }
+ }
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ return 0;
+}
+
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer
+ withTempBuffer:(nullable uint8_t*)tmpBuffer {
+ const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+ const OSType dstPixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
+
+ switch (srcPixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ size_t dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ size_t dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+ if (dstWidth > 0 && dstHeight > 0) {
+ RTC_DCHECK(dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+ dstPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+ if ([self requiresScalingToWidth:dstWidth height:dstHeight]) {
+ RTC_DCHECK(tmpBuffer);
+ }
+ [self cropAndScaleNV12To:outputPixelBuffer withTempBuffer:tmpBuffer];
+ }
+ break;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ RTC_DCHECK(srcPixelFormat == dstPixelFormat);
+ [self cropAndScaleARGBTo:outputPixelBuffer];
+ break;
+ }
+ default: {
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ }
+ }
+
+ return YES;
+}
+
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer =
+ [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]];
+
+ switch (pixelFormat) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ const uint8_t* srcY =
+ static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+ const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+ const uint8_t* srcUV =
+ static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+ const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+ // Crop just by modifying pointers.
+ srcY += srcYStride * _cropY + _cropX;
+ srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+ // TODO(magjed): Use a frame buffer pool.
+ webrtc::NV12ToI420Scaler nv12ToI420Scaler;
+ nv12ToI420Scaler.NV12ToI420Scale(srcY,
+ srcYStride,
+ srcUV,
+ srcUVStride,
+ _cropWidth,
+ _cropHeight,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ break;
+ }
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ARGB: {
+ CVPixelBufferRef scaledPixelBuffer = NULL;
+ CVPixelBufferRef sourcePixelBuffer = NULL;
+ if ([self requiresCropping] ||
+ [self requiresScalingToWidth:i420Buffer.width height:i420Buffer.height]) {
+ CVPixelBufferCreate(
+ NULL, i420Buffer.width, i420Buffer.height, pixelFormat, NULL, &scaledPixelBuffer);
+ [self cropAndScaleTo:scaledPixelBuffer withTempBuffer:NULL];
+
+ CVPixelBufferLockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+ sourcePixelBuffer = scaledPixelBuffer;
+ } else {
+ sourcePixelBuffer = _pixelBuffer;
+ }
+ const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(sourcePixelBuffer));
+ const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer);
+
+ if (pixelFormat == kCVPixelFormatType_32BGRA) {
+ // Corresponds to libyuv::FOURCC_ARGB
+ libyuv::ARGBToI420(src,
+ bytesPerRow,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+ // Corresponds to libyuv::FOURCC_BGRA
+ libyuv::BGRAToI420(src,
+ bytesPerRow,
+ i420Buffer.mutableDataY,
+ i420Buffer.strideY,
+ i420Buffer.mutableDataU,
+ i420Buffer.strideU,
+ i420Buffer.mutableDataV,
+ i420Buffer.strideV,
+ i420Buffer.width,
+ i420Buffer.height);
+ }
+
+ if (scaledPixelBuffer) {
+ CVPixelBufferUnlockBaseAddress(scaledPixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVBufferRelease(scaledPixelBuffer);
+ }
+ break;
+ }
+ default: {
+ RTC_DCHECK_NOTREACHED() << "Unsupported pixel format.";
+ }
+ }
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ return i420Buffer;
+}
+
+#pragma mark - Debugging
+
+#if !defined(NDEBUG) && defined(WEBRTC_IOS)
+- (id)debugQuickLookObject {
+ CGImageRef cgImage;
+ VTCreateCGImageFromCVPixelBuffer(_pixelBuffer, NULL, &cgImage);
+ UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
+ CGImageRelease(cgImage);
+ return image;
+}
+#endif
+
+#pragma mark - Private
+
+- (void)cropAndScaleNV12To:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
+ // Prepare output pointers.
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ }
+ const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+ uint8_t* dstY =
+ reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
+ const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
+ uint8_t* dstUV =
+ reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
+ const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
+
+ // Prepare source pointers.
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ const uint8_t* srcY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+ const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+ const uint8_t* srcUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+ const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+ // Crop just by modifying pointers.
+ srcY += srcYStride * _cropY + _cropX;
+ srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+ webrtc::NV12Scale(tmpBuffer,
+ srcY,
+ srcYStride,
+ srcUV,
+ srcUVStride,
+ _cropWidth,
+ _cropHeight,
+ dstY,
+ dstYStride,
+ dstUV,
+ dstUVStride,
+ dstWidth,
+ dstHeight);
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
+}
+
+- (void)cropAndScaleARGBTo:(CVPixelBufferRef)outputPixelBuffer {
+ // Prepare output pointers.
+ CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
+ if (cvRet != kCVReturnSuccess) {
+ RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+ }
+ const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+ const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+
+ uint8_t* dst = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddress(outputPixelBuffer));
+ const int dstStride = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
+
+ // Prepare source pointers.
+ CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ const uint8_t* src = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(_pixelBuffer));
+ const int srcStride = CVPixelBufferGetBytesPerRow(_pixelBuffer);
+
+ // Crop just by modifying pointers. Need to ensure that src pointer points to a byte corresponding
+ // to the start of a new pixel (byte with B for BGRA) so that libyuv scales correctly.
+ const int bytesPerPixel = 4;
+ src += srcStride * _cropY + (_cropX * bytesPerPixel);
+
+ // kCVPixelFormatType_32BGRA corresponds to libyuv::FOURCC_ARGB
+ libyuv::ARGBScale(src,
+ srcStride,
+ _cropWidth,
+ _cropHeight,
+ dst,
+ dstStride,
+ dstWidth,
+ dstHeight,
+ libyuv::kFilterBox);
+
+ CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h
new file mode 100644
index 0000000000..32ab6877f0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface AVCaptureSession (DevicePosition)
+
+// Check the image's EXIF for the camera the image came from.
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm
new file mode 100644
index 0000000000..0814ecc6c5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "AVCaptureSession+DevicePosition.h"
+
+BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
+ return CFStringFindWithOptions(theString,
+ stringToFind,
+ CFRangeMake(0, CFStringGetLength(theString)),
+ kCFCompareCaseInsensitive,
+ nil);
+}
+
+@implementation AVCaptureSession (DevicePosition)
+
++ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ // Check the image's EXIF for the camera the image came from.
+ AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
+ CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
+ kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
+ if (attachments) {
+ int size = CFDictionaryGetCount(attachments);
+ if (size > 0) {
+ CFDictionaryRef cfExifDictVal = nil;
+ if (CFDictionaryGetValueIfPresent(
+ attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
+ CFStringRef cfLensModelStrVal;
+ if (CFDictionaryGetValueIfPresent(cfExifDictVal,
+ (const void *)CFSTR("LensModel"),
+ (const void **)&cfLensModelStrVal)) {
+ if (CFStringContainsString(cfLensModelStrVal, CFSTR("front"))) {
+ cameraPosition = AVCaptureDevicePositionFront;
+ } else if (CFStringContainsString(cfLensModelStrVal, CFSTR("back"))) {
+ cameraPosition = AVCaptureDevicePositionBack;
+ }
+ }
+ }
+ }
+ CFRelease(attachments);
+ }
+ return cameraPosition;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h
new file mode 100644
index 0000000000..b0324e8a19
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
// Conversions between NSString and UTF-8 encoded std::string.
@interface NSString (StdString)

// UTF-8 encoded std::string representation of the receiver.
@property(nonatomic, readonly) std::string stdString;

// Converts `nsString` to a UTF-8 encoded std::string.
+ (std::string)stdStringForString:(NSString *)nsString;
// Converts the UTF-8 bytes of `stdString` to an NSString.
+ (NSString *)stringForStdString:(const std::string &)stdString;

@end
+
// Creation of NSString from an absl::string_view (UTF-8 bytes).
@interface NSString (AbslStringView)

+ (NSString *)stringForAbslStringView:(const absl::string_view)abslStringView;

@end
+
+NS_ASSUME_NONNULL_END
diff --git a/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm
new file mode 100644
index 0000000000..c98432c445
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/NSString+StdString.mm
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "NSString+StdString.h"
+
+#include "absl/strings/string_view.h"
+
@implementation NSString (StdString)

// UTF-8 std::string representation of the receiver.
- (std::string)stdString {
  return [NSString stdStringForString:self];
}

// Converts `nsString` to a UTF-8 encoded std::string. Going through
// NSData (rather than -UTF8String) keeps the exact encoded byte count.
+ (std::string)stdStringForString:(NSString *)nsString {
  NSData *utf8Data = [nsString dataUsingEncoding:NSUTF8StringEncoding];
  const char *utf8Bytes = reinterpret_cast<const char *>(utf8Data.bytes);
  return std::string(utf8Bytes, utf8Data.length);
}

// Converts `stdString` to an NSString. Constructed from an explicit
// byte range because a std::string may contain embedded NUL characters.
+ (NSString *)stringForStdString:(const std::string &)stdString {
  return [[NSString alloc] initWithBytes:stdString.data()
                                  length:stdString.length()
                                encoding:NSUTF8StringEncoding];
}

@end
+
@implementation NSString (AbslStringView)

// Converts the UTF-8 bytes of `abslStringView` to an NSString.
// Constructed from an explicit byte range because a string_view need not
// be NUL-terminated.
+ (NSString *)stringForAbslStringView:(const absl::string_view)abslStringView {
  return [[NSString alloc] initWithBytes:abslStringView.data()
                                  length:abslStringView.length()
                                encoding:NSUTF8StringEncoding];
}

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h
new file mode 100644
index 0000000000..db9b15a45c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCMacros.h"
+
+@class AVCaptureSession;
+
/** RTCCameraPreviewView is a view that renders local video from an
 * AVCaptureSession. Its backing layer is an AVCaptureVideoPreviewLayer
 * (see +layerClass in the implementation).
 */
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCCameraPreviewView) : UIView

/** The capture session being rendered in the view. Capture session
 * is assigned to AVCaptureVideoPreviewLayer async in the same
 * queue that the AVCaptureSession is started/stopped.
 */
@property(nonatomic, strong) AVCaptureSession* captureSession;

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m
new file mode 100644
index 0000000000..12e87d8d64
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCCameraPreviewView.m
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+
+#import "RTCDispatcher+Private.h"
+
@implementation RTC_OBJC_TYPE (RTCCameraPreviewView)

@synthesize captureSession = _captureSession;

// Back the view with an AVCaptureVideoPreviewLayer so the session can
// render directly into the view's layer.
+ (Class)layerClass {
  return [AVCaptureVideoPreviewLayer class];
}

- (instancetype)initWithFrame:(CGRect)aRect {
  self = [super initWithFrame:aRect];
  if (self) {
    [self addOrientationObserver];
  }
  return self;
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
  self = [super initWithCoder:aDecoder];
  if (self) {
    [self addOrientationObserver];
  }
  return self;
}

- (void)dealloc {
  [self removeOrientationObserver];
}

// Assigns the session to the preview layer on the capture-session queue
// (the same queue that starts/stops the session), then hops back to the
// main queue to refresh the video orientation.
- (void)setCaptureSession:(AVCaptureSession *)captureSession {
  if (_captureSession == captureSession) {
    return;
  }
  _captureSession = captureSession;
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeMain
                    block:^{
                      AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
                      [RTC_OBJC_TYPE(RTCDispatcher)
                          dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                        block:^{
                                          previewLayer.session = captureSession;
                                          [RTC_OBJC_TYPE(RTCDispatcher)
                                              dispatchAsyncOnType:RTCDispatcherTypeMain
                                                            block:^{
                                                              [self setCorrectVideoOrientation];
                                                            }];
                                        }];
                    }];
}

- (void)layoutSubviews {
  [super layoutSubviews];

  // Update the video orientation based on the device orientation.
  [self setCorrectVideoOrientation];
}

- (void)orientationChanged:(NSNotification *)notification {
  [self setCorrectVideoOrientation];
}

// Maps the current UIDeviceOrientation onto the preview connection's
// AVCaptureVideoOrientation. Device and interface orientations are
// mirrored in landscape: a device rotated landscape-left presents a
// landscape-right interface. The original code compared the
// UIDeviceOrientation value against UIInterfaceOrientation constants,
// which only behaved correctly because the two enums' landscape members
// have swapped numeric values; the comparisons below are numerically
// identical but use the correct enum family.
- (void)setCorrectVideoOrientation {
  // Get current device orientation.
  UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
  AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];

  // First check if we are allowed to set the video orientation.
  if (previewLayer.connection.isVideoOrientationSupported) {
    // Set the video orientation based on device orientation.
    if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) {
      previewLayer.connection.videoOrientation =
          AVCaptureVideoOrientationPortraitUpsideDown;
    } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
      previewLayer.connection.videoOrientation =
          AVCaptureVideoOrientationLandscapeRight;
    } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
      previewLayer.connection.videoOrientation =
          AVCaptureVideoOrientationLandscapeLeft;
    } else if (deviceOrientation == UIDeviceOrientationPortrait) {
      previewLayer.connection.videoOrientation =
          AVCaptureVideoOrientationPortrait;
    }
    // If device orientation switches to FaceUp or FaceDown, don't change video orientation.
  }
}

#pragma mark - Private

// NOTE(review): assumes some component has called
// -[UIDevice beginGeneratingDeviceOrientationNotifications]; otherwise
// UIDeviceOrientationDidChangeNotification may never fire — confirm.
- (void)addOrientationObserver {
  [[NSNotificationCenter defaultCenter] addObserver:self
                                           selector:@selector(orientationChanged:)
                                               name:UIDeviceOrientationDidChangeNotification
                                             object:nil];
}

- (void)removeOrientationObserver {
  [[NSNotificationCenter defaultCenter] removeObserver:self
                                                  name:UIDeviceOrientationDidChangeNotification
                                                object:nil];
}

// The view's backing layer, typed as a preview layer (see +layerClass).
- (AVCaptureVideoPreviewLayer *)previewLayer {
  return (AVCaptureVideoPreviewLayer *)self.layer;
}

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h
new file mode 100644
index 0000000000..195c651790
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher+Private.h
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher.h"
+
// Private interface exposing the backing dispatch queue to other SDK
// classes.
@interface RTC_OBJC_TYPE (RTCDispatcher)
()

    // Returns the queue (main queue or one of the shared serial queues)
    // associated with `dispatchType`.
    + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType;

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h
new file mode 100644
index 0000000000..e148af6dea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
// Queue types owned by RTCDispatcher; each maps to one shared queue.
typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
  // Main dispatcher queue.
  RTCDispatcherTypeMain,
  // Used for starting/stopping AVCaptureSession, and assigning
  // capture session to AVCaptureVideoPreviewLayer.
  RTCDispatcherTypeCaptureSession,
  // Used for operations on AVAudioSession.
  RTCDispatcherTypeAudioSession,
  // Used for operations on NWPathMonitor.
  RTCDispatcherTypeNetworkMonitor,
};
+
/** Dispatcher that asynchronously dispatches blocks to a specific
 * shared dispatch queue.
 */
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCDispatcher) : NSObject

/** Only class methods are exposed; instances are never created. */
- (instancetype)init NS_UNAVAILABLE;

/** Dispatch the block asynchronously on the queue for dispatchType.
 * @param dispatchType The queue type to dispatch on.
 * @param block The block to dispatch asynchronously.
 */
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block;

/** Returns YES if run on queue for the dispatchType otherwise NO.
 * Useful for asserting that a method is run on a correct queue.
 */
+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType;

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m
new file mode 100644
index 0000000000..4df19bc297
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/RTCDispatcher.m
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher+Private.h"
+
// Shared serial queues, created once in +initialize.
static dispatch_queue_t kAudioSessionQueue = nil;
static dispatch_queue_t kCaptureSessionQueue = nil;
static dispatch_queue_t kNetworkMonitorQueue = nil;

@implementation RTC_OBJC_TYPE (RTCDispatcher)

// Creates the shared serial queues the first time the class is messaged.
// dispatch_once guards against re-entry if a subclass triggers
// +initialize again.
+ (void)initialize {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    kAudioSessionQueue =
        dispatch_queue_create("org.webrtc.RTCDispatcherAudioSession", DISPATCH_QUEUE_SERIAL);
    kCaptureSessionQueue =
        dispatch_queue_create("org.webrtc.RTCDispatcherCaptureSession", DISPATCH_QUEUE_SERIAL);
    kNetworkMonitorQueue =
        dispatch_queue_create("org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
  });
}

// Asynchronously runs `block` on the queue associated with `dispatchType`.
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
                      block:(dispatch_block_t)block {
  dispatch_async([self dispatchQueueForType:dispatchType], block);
}

// Compares the label of the current queue against the label of the queue
// for `dispatchType`. Labels are compared (rather than queue objects)
// because GCD provides no supported way to obtain the current queue.
+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
  const char* expectedLabel =
      dispatch_queue_get_label([self dispatchQueueForType:dispatchType]);
  const char* currentQueueLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);

  NSAssert(strlen(expectedLabel) > 0, @"Label is required for the target queue.");
  NSAssert(strlen(currentQueueLabel) > 0, @"Label is required for the current queue.");

  return strcmp(expectedLabel, currentQueueLabel) == 0;
}

#pragma mark - Private

// Maps a dispatcher type to its backing queue.
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
  switch (dispatchType) {
    case RTCDispatcherTypeMain:
      return dispatch_get_main_queue();
    case RTCDispatcherTypeCaptureSession:
      return kCaptureSessionQueue;
    case RTCDispatcherTypeAudioSession:
      return kAudioSessionQueue;
    case RTCDispatcherTypeNetworkMonitor:
      return kNetworkMonitorQueue;
  }
}

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h
new file mode 100644
index 0000000000..ab477e2ada
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
// Known iOS device models, including simulators. The mapping from the
// hardware "machine" identifier (e.g. @"iPhone9,1") to these values
// lives in UIDevice+RTCDevice.mm.
typedef NS_ENUM(NSInteger, RTCDeviceType) {
  RTCDeviceTypeUnknown,
  RTCDeviceTypeIPhone1G,
  RTCDeviceTypeIPhone3G,
  RTCDeviceTypeIPhone3GS,
  RTCDeviceTypeIPhone4,
  RTCDeviceTypeIPhone4Verizon,
  RTCDeviceTypeIPhone4S,
  RTCDeviceTypeIPhone5GSM,
  RTCDeviceTypeIPhone5GSM_CDMA,
  RTCDeviceTypeIPhone5CGSM,
  RTCDeviceTypeIPhone5CGSM_CDMA,
  RTCDeviceTypeIPhone5SGSM,
  RTCDeviceTypeIPhone5SGSM_CDMA,
  RTCDeviceTypeIPhone6Plus,
  RTCDeviceTypeIPhone6,
  RTCDeviceTypeIPhone6S,
  RTCDeviceTypeIPhone6SPlus,
  RTCDeviceTypeIPhone7,
  RTCDeviceTypeIPhone7Plus,
  RTCDeviceTypeIPhoneSE,
  RTCDeviceTypeIPhone8,
  RTCDeviceTypeIPhone8Plus,
  RTCDeviceTypeIPhoneX,
  RTCDeviceTypeIPhoneXS,
  RTCDeviceTypeIPhoneXSMax,
  RTCDeviceTypeIPhoneXR,
  RTCDeviceTypeIPhone11,
  RTCDeviceTypeIPhone11Pro,
  RTCDeviceTypeIPhone11ProMax,
  RTCDeviceTypeIPhone12Mini,
  RTCDeviceTypeIPhone12,
  RTCDeviceTypeIPhone12Pro,
  RTCDeviceTypeIPhone12ProMax,
  RTCDeviceTypeIPhoneSE2Gen,
  RTCDeviceTypeIPhone13,
  RTCDeviceTypeIPhone13Mini,
  RTCDeviceTypeIPhone13Pro,
  RTCDeviceTypeIPhone13ProMax,

  RTCDeviceTypeIPodTouch1G,
  RTCDeviceTypeIPodTouch2G,
  RTCDeviceTypeIPodTouch3G,
  RTCDeviceTypeIPodTouch4G,
  RTCDeviceTypeIPodTouch5G,
  RTCDeviceTypeIPodTouch6G,
  RTCDeviceTypeIPodTouch7G,
  RTCDeviceTypeIPad,
  RTCDeviceTypeIPad2Wifi,
  RTCDeviceTypeIPad2GSM,
  RTCDeviceTypeIPad2CDMA,
  RTCDeviceTypeIPad2Wifi2,
  RTCDeviceTypeIPadMiniWifi,
  RTCDeviceTypeIPadMiniGSM,
  RTCDeviceTypeIPadMiniGSM_CDMA,
  RTCDeviceTypeIPad3Wifi,
  RTCDeviceTypeIPad3GSM_CDMA,
  RTCDeviceTypeIPad3GSM,
  RTCDeviceTypeIPad4Wifi,
  RTCDeviceTypeIPad4GSM,
  RTCDeviceTypeIPad4GSM_CDMA,
  RTCDeviceTypeIPad5,
  RTCDeviceTypeIPad6,
  RTCDeviceTypeIPadAirWifi,
  RTCDeviceTypeIPadAirCellular,
  RTCDeviceTypeIPadAirWifiCellular,
  RTCDeviceTypeIPadAir2,
  RTCDeviceTypeIPadMini2GWifi,
  RTCDeviceTypeIPadMini2GCellular,
  RTCDeviceTypeIPadMini2GWifiCellular,
  RTCDeviceTypeIPadMini3,
  RTCDeviceTypeIPadMini4,
  RTCDeviceTypeIPadPro9Inch,
  RTCDeviceTypeIPadPro12Inch,
  RTCDeviceTypeIPadPro12Inch2,
  RTCDeviceTypeIPadPro10Inch,
  RTCDeviceTypeIPad7Gen10Inch,
  RTCDeviceTypeIPadPro3Gen11Inch,
  RTCDeviceTypeIPadPro3Gen12Inch,
  RTCDeviceTypeIPadPro4Gen11Inch,
  RTCDeviceTypeIPadPro4Gen12Inch,
  RTCDeviceTypeIPadMini5Gen,
  RTCDeviceTypeIPadAir3Gen,
  RTCDeviceTypeIPad8,
  RTCDeviceTypeIPad9,
  RTCDeviceTypeIPadMini6,
  RTCDeviceTypeIPadAir4Gen,
  RTCDeviceTypeIPadPro5Gen11Inch,
  RTCDeviceTypeIPadPro5Gen12Inch,
  RTCDeviceTypeSimulatori386,
  RTCDeviceTypeSimulatorx86_64,
};

// Hardware-model identification helpers.
@interface UIDevice (RTCDevice)

// Returns the RTCDeviceType for the current hardware, or
// RTCDeviceTypeUnknown for unrecognized machine identifiers.
+ (RTCDeviceType)deviceType;
// Returns YES when the running system version is 11.0 or newer.
+ (BOOL)isIOS11OrLater;

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm
new file mode 100644
index 0000000000..77a5d79f79
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/UIDevice+RTCDevice.mm
@@ -0,0 +1,171 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "UIDevice+RTCDevice.h"
+
+#import <sys/utsname.h>
+#include <memory>
+
@implementation UIDevice (RTCDevice)

// Maps the utsname machine identifier of the current hardware to an
// RTCDeviceType. Unrecognized identifiers yield RTCDeviceTypeUnknown.
// NOTE(review): the table has no @"arm64" entry, so a simulator whose
// machine name reports "arm64" would map to Unknown — confirm whether
// Apple Silicon simulators need an entry here.
+ (RTCDeviceType)deviceType {
  NSDictionary *machineNameToType = @{
    @"iPhone1,1" : @(RTCDeviceTypeIPhone1G),
    @"iPhone1,2" : @(RTCDeviceTypeIPhone3G),
    @"iPhone2,1" : @(RTCDeviceTypeIPhone3GS),
    @"iPhone3,1" : @(RTCDeviceTypeIPhone4),
    @"iPhone3,2" : @(RTCDeviceTypeIPhone4),
    @"iPhone3,3" : @(RTCDeviceTypeIPhone4Verizon),
    @"iPhone4,1" : @(RTCDeviceTypeIPhone4S),
    @"iPhone5,1" : @(RTCDeviceTypeIPhone5GSM),
    @"iPhone5,2" : @(RTCDeviceTypeIPhone5GSM_CDMA),
    @"iPhone5,3" : @(RTCDeviceTypeIPhone5CGSM),
    @"iPhone5,4" : @(RTCDeviceTypeIPhone5CGSM_CDMA),
    @"iPhone6,1" : @(RTCDeviceTypeIPhone5SGSM),
    @"iPhone6,2" : @(RTCDeviceTypeIPhone5SGSM_CDMA),
    @"iPhone7,1" : @(RTCDeviceTypeIPhone6Plus),
    @"iPhone7,2" : @(RTCDeviceTypeIPhone6),
    @"iPhone8,1" : @(RTCDeviceTypeIPhone6S),
    @"iPhone8,2" : @(RTCDeviceTypeIPhone6SPlus),
    @"iPhone8,4" : @(RTCDeviceTypeIPhoneSE),
    @"iPhone9,1" : @(RTCDeviceTypeIPhone7),
    @"iPhone9,2" : @(RTCDeviceTypeIPhone7Plus),
    @"iPhone9,3" : @(RTCDeviceTypeIPhone7),
    @"iPhone9,4" : @(RTCDeviceTypeIPhone7Plus),
    @"iPhone10,1" : @(RTCDeviceTypeIPhone8),
    @"iPhone10,2" : @(RTCDeviceTypeIPhone8Plus),
    @"iPhone10,3" : @(RTCDeviceTypeIPhoneX),
    @"iPhone10,4" : @(RTCDeviceTypeIPhone8),
    @"iPhone10,5" : @(RTCDeviceTypeIPhone8Plus),
    @"iPhone10,6" : @(RTCDeviceTypeIPhoneX),
    @"iPhone11,2" : @(RTCDeviceTypeIPhoneXS),
    @"iPhone11,4" : @(RTCDeviceTypeIPhoneXSMax),
    @"iPhone11,6" : @(RTCDeviceTypeIPhoneXSMax),
    @"iPhone11,8" : @(RTCDeviceTypeIPhoneXR),
    @"iPhone12,1" : @(RTCDeviceTypeIPhone11),
    @"iPhone12,3" : @(RTCDeviceTypeIPhone11Pro),
    @"iPhone12,5" : @(RTCDeviceTypeIPhone11ProMax),
    @"iPhone12,8" : @(RTCDeviceTypeIPhoneSE2Gen),
    @"iPhone13,1" : @(RTCDeviceTypeIPhone12Mini),
    @"iPhone13,2" : @(RTCDeviceTypeIPhone12),
    @"iPhone13,3" : @(RTCDeviceTypeIPhone12Pro),
    @"iPhone13,4" : @(RTCDeviceTypeIPhone12ProMax),
    @"iPhone14,5" : @(RTCDeviceTypeIPhone13),
    @"iPhone14,4" : @(RTCDeviceTypeIPhone13Mini),
    @"iPhone14,2" : @(RTCDeviceTypeIPhone13Pro),
    @"iPhone14,3" : @(RTCDeviceTypeIPhone13ProMax),
    @"iPod1,1" : @(RTCDeviceTypeIPodTouch1G),
    @"iPod2,1" : @(RTCDeviceTypeIPodTouch2G),
    @"iPod3,1" : @(RTCDeviceTypeIPodTouch3G),
    @"iPod4,1" : @(RTCDeviceTypeIPodTouch4G),
    @"iPod5,1" : @(RTCDeviceTypeIPodTouch5G),
    @"iPod7,1" : @(RTCDeviceTypeIPodTouch6G),
    @"iPod9,1" : @(RTCDeviceTypeIPodTouch7G),
    @"iPad1,1" : @(RTCDeviceTypeIPad),
    @"iPad2,1" : @(RTCDeviceTypeIPad2Wifi),
    @"iPad2,2" : @(RTCDeviceTypeIPad2GSM),
    @"iPad2,3" : @(RTCDeviceTypeIPad2CDMA),
    @"iPad2,4" : @(RTCDeviceTypeIPad2Wifi2),
    @"iPad2,5" : @(RTCDeviceTypeIPadMiniWifi),
    @"iPad2,6" : @(RTCDeviceTypeIPadMiniGSM),
    @"iPad2,7" : @(RTCDeviceTypeIPadMiniGSM_CDMA),
    @"iPad3,1" : @(RTCDeviceTypeIPad3Wifi),
    @"iPad3,2" : @(RTCDeviceTypeIPad3GSM_CDMA),
    @"iPad3,3" : @(RTCDeviceTypeIPad3GSM),
    @"iPad3,4" : @(RTCDeviceTypeIPad4Wifi),
    @"iPad3,5" : @(RTCDeviceTypeIPad4GSM),
    @"iPad3,6" : @(RTCDeviceTypeIPad4GSM_CDMA),
    @"iPad4,1" : @(RTCDeviceTypeIPadAirWifi),
    @"iPad4,2" : @(RTCDeviceTypeIPadAirCellular),
    @"iPad4,3" : @(RTCDeviceTypeIPadAirWifiCellular),
    @"iPad4,4" : @(RTCDeviceTypeIPadMini2GWifi),
    @"iPad4,5" : @(RTCDeviceTypeIPadMini2GCellular),
    @"iPad4,6" : @(RTCDeviceTypeIPadMini2GWifiCellular),
    @"iPad4,7" : @(RTCDeviceTypeIPadMini3),
    @"iPad4,8" : @(RTCDeviceTypeIPadMini3),
    @"iPad4,9" : @(RTCDeviceTypeIPadMini3),
    @"iPad5,1" : @(RTCDeviceTypeIPadMini4),
    @"iPad5,2" : @(RTCDeviceTypeIPadMini4),
    @"iPad5,3" : @(RTCDeviceTypeIPadAir2),
    @"iPad5,4" : @(RTCDeviceTypeIPadAir2),
    @"iPad6,3" : @(RTCDeviceTypeIPadPro9Inch),
    @"iPad6,4" : @(RTCDeviceTypeIPadPro9Inch),
    @"iPad6,7" : @(RTCDeviceTypeIPadPro12Inch),
    @"iPad6,8" : @(RTCDeviceTypeIPadPro12Inch),
    @"iPad6,11" : @(RTCDeviceTypeIPad5),
    @"iPad6,12" : @(RTCDeviceTypeIPad5),
    @"iPad7,1" : @(RTCDeviceTypeIPadPro12Inch2),
    @"iPad7,2" : @(RTCDeviceTypeIPadPro12Inch2),
    @"iPad7,3" : @(RTCDeviceTypeIPadPro10Inch),
    @"iPad7,4" : @(RTCDeviceTypeIPadPro10Inch),
    @"iPad7,5" : @(RTCDeviceTypeIPad6),
    @"iPad7,6" : @(RTCDeviceTypeIPad6),
    @"iPad7,11" : @(RTCDeviceTypeIPad7Gen10Inch),
    @"iPad7,12" : @(RTCDeviceTypeIPad7Gen10Inch),
    @"iPad8,1" : @(RTCDeviceTypeIPadPro3Gen11Inch),
    @"iPad8,2" : @(RTCDeviceTypeIPadPro3Gen11Inch),
    @"iPad8,3" : @(RTCDeviceTypeIPadPro3Gen11Inch),
    @"iPad8,4" : @(RTCDeviceTypeIPadPro3Gen11Inch),
    @"iPad8,5" : @(RTCDeviceTypeIPadPro3Gen12Inch),
    @"iPad8,6" : @(RTCDeviceTypeIPadPro3Gen12Inch),
    @"iPad8,7" : @(RTCDeviceTypeIPadPro3Gen12Inch),
    @"iPad8,8" : @(RTCDeviceTypeIPadPro3Gen12Inch),
    @"iPad8,9" : @(RTCDeviceTypeIPadPro4Gen11Inch),
    @"iPad8,10" : @(RTCDeviceTypeIPadPro4Gen11Inch),
    @"iPad8,11" : @(RTCDeviceTypeIPadPro4Gen12Inch),
    @"iPad8,12" : @(RTCDeviceTypeIPadPro4Gen12Inch),
    @"iPad11,1" : @(RTCDeviceTypeIPadMini5Gen),
    @"iPad11,2" : @(RTCDeviceTypeIPadMini5Gen),
    @"iPad11,3" : @(RTCDeviceTypeIPadAir3Gen),
    @"iPad11,4" : @(RTCDeviceTypeIPadAir3Gen),
    @"iPad11,6" : @(RTCDeviceTypeIPad8),
    @"iPad11,7" : @(RTCDeviceTypeIPad8),
    @"iPad12,1" : @(RTCDeviceTypeIPad9),
    @"iPad12,2" : @(RTCDeviceTypeIPad9),
    @"iPad13,1" : @(RTCDeviceTypeIPadAir4Gen),
    @"iPad13,2" : @(RTCDeviceTypeIPadAir4Gen),
    @"iPad13,4" : @(RTCDeviceTypeIPadPro5Gen11Inch),
    @"iPad13,5" : @(RTCDeviceTypeIPadPro5Gen11Inch),
    @"iPad13,6" : @(RTCDeviceTypeIPadPro5Gen11Inch),
    @"iPad13,7" : @(RTCDeviceTypeIPadPro5Gen11Inch),
    @"iPad13,8" : @(RTCDeviceTypeIPadPro5Gen12Inch),
    @"iPad13,9" : @(RTCDeviceTypeIPadPro5Gen12Inch),
    @"iPad13,10" : @(RTCDeviceTypeIPadPro5Gen12Inch),
    @"iPad13,11" : @(RTCDeviceTypeIPadPro5Gen12Inch),
    @"iPad14,1" : @(RTCDeviceTypeIPadMini6),
    @"iPad14,2" : @(RTCDeviceTypeIPadMini6),
    @"i386" : @(RTCDeviceTypeSimulatori386),
    @"x86_64" : @(RTCDeviceTypeSimulatorx86_64),
  };

  RTCDeviceType deviceType = RTCDeviceTypeUnknown;
  NSNumber *typeNumber = machineNameToType[[self machineName]];
  if (typeNumber) {
    deviceType = static_cast<RTCDeviceType>(typeNumber.integerValue);
  }
  return deviceType;
}

// Raw hardware identifier from uname(3) (utsname.machine), e.g.
// "iPhone9,1", or "x86_64"/"i386" in the table's simulator entries.
+ (NSString *)machineName {
  struct utsname systemInfo;
  uname(&systemInfo);
  return [[NSString alloc] initWithCString:systemInfo.machine
                                  encoding:NSUTF8StringEncoding];
}

// Parses -[UIDevice systemVersion] as a double; -doubleValue stops at the
// second dot, so only the leading "major.minor" is captured.
+ (double)currentDeviceSystemVersion {
  return [self currentDevice].systemVersion.doubleValue;
}

// YES when the system version is 11.0 or newer.
+ (BOOL)isIOS11OrLater {
  return [self currentDeviceSystemVersion] >= 11.0;
}

@end
diff --git a/third_party/libwebrtc/sdk/objc/helpers/noop.mm b/third_party/libwebrtc/sdk/objc/helpers/noop.mm
new file mode 100644
index 0000000000..16a8e6d5c1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/noop.mm
@@ -0,0 +1,13 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is only needed to make ninja happy on some platforms.
+// On some platforms it is not possible to link an rtc_static_library
+// without any source file listed in the GN target.
diff --git a/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h b/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h
new file mode 100644
index 0000000000..092f02b3af
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/helpers/scoped_cftyperef.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
+#define SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
+
+#include <CoreFoundation/CoreFoundation.h>
+namespace rtc {
+
+// RETAIN: ScopedTypeRef should retain the object when it takes
+// ownership.
+// ASSUME: Assume the object already has already been retained.
+// ScopedTypeRef takes over ownership.
+enum class RetainPolicy { RETAIN, ASSUME };
+
+namespace internal {
// Ownership operations for CoreFoundation type references, used as the
// Traits parameter of ScopedTypeRef below.
template <typename T>
struct CFTypeRefTraits {
  // Sentinel meaning "no object held".
  static T InvalidValue() { return nullptr; }
  // Decrements the retain count.
  static void Release(T ref) { CFRelease(ref); }
  // Increments the retain count; returns `ref` for call-chaining.
  static T Retain(T ref) {
    CFRetain(ref);
    return ref;
  }
};
+
// Scoped holder for a reference-counted object (CF types via the traits
// above). Releases the held object on destruction and on reassignment;
// copies retain. See the RetainPolicy enum above: RETAIN retains an
// incoming raw reference, ASSUME adopts it as-is.
template <typename T, typename Traits>
class ScopedTypeRef {
 public:
  // Holds nothing.
  ScopedTypeRef() : ptr_(Traits::InvalidValue()) {}
  // Adopts `ptr` without retaining (ASSUME semantics).
  explicit ScopedTypeRef(T ptr) : ptr_(ptr) {}
  // Takes `ptr`, retaining it first when `policy` is RETAIN.
  ScopedTypeRef(T ptr, RetainPolicy policy) : ScopedTypeRef(ptr) {
    if (ptr_ && policy == RetainPolicy::RETAIN)
      Traits::Retain(ptr_);
  }

  // Copying retains the underlying object.
  ScopedTypeRef(const ScopedTypeRef<T, Traits>& rhs) : ptr_(rhs.ptr_) {
    if (ptr_)
      ptr_ = Traits::Retain(ptr_);
  }

  ~ScopedTypeRef() {
    if (ptr_) {
      Traits::Release(ptr_);
    }
  }

  T get() const { return ptr_; }
  T operator->() const { return ptr_; }
  explicit operator bool() const { return ptr_; }

  bool operator!() const { return !ptr_; }

  // Adopts `rhs` WITHOUT retaining (ASSUME semantics), releasing any
  // currently held object. Note the asymmetry with the copy-assignment
  // operator below, which retains.
  ScopedTypeRef& operator=(const T& rhs) {
    if (ptr_)
      Traits::Release(ptr_);
    ptr_ = rhs;
    return *this;
  }

  // Retains rhs's object. Self-assignment is safe because reset()
  // retains before releasing.
  ScopedTypeRef& operator=(const ScopedTypeRef<T, Traits>& rhs) {
    reset(rhs.get(), RetainPolicy::RETAIN);
    return *this;
  }

  // This is intended to take ownership of objects that are
  // created by pass-by-pointer initializers.
  // NOTE(review): RTC_DCHECK is not provided by this header's only
  // include (<CoreFoundation/CoreFoundation.h>); it appears to rely on
  // "rtc_base/checks.h" arriving transitively — confirm.
  T* InitializeInto() {
    RTC_DCHECK(!ptr_);
    return &ptr_;
  }

  // Replaces the held object: retains `ptr` first when `policy` is
  // RETAIN, then releases the previously held object.
  void reset(T ptr, RetainPolicy policy = RetainPolicy::ASSUME) {
    if (ptr && policy == RetainPolicy::RETAIN)
      Traits::Retain(ptr);
    if (ptr_)
      Traits::Release(ptr_);
    ptr_ = ptr;
  }

  // Relinquishes ownership: returns the held reference without releasing
  // it and leaves this holder empty.
  T release() {
    T temp = ptr_;
    ptr_ = Traits::InvalidValue();
    return temp;
  }

 private:
  T ptr_;
};
+} // namespace internal
+
// Convenience alias binding ScopedTypeRef to the CF traits.
template <typename T>
using ScopedCFTypeRef =
    internal::ScopedTypeRef<T, internal::CFTypeRefTraits<T>>;

// Wraps `cftype` and retains it (caller keeps its own reference).
template <typename T>
static ScopedCFTypeRef<T> AdoptCF(T cftype) {
  return ScopedCFTypeRef<T>(cftype, RetainPolicy::RETAIN);
}

// Wraps `cftype` without retaining (takes over the caller's reference).
template <typename T>
static ScopedCFTypeRef<T> ScopedCF(T cftype) {
  return ScopedCFTypeRef<T>(cftype);
}
+
+} // namespace rtc
+
+#endif // SDK_OBJC_HELPERS_SCOPED_CFTYPEREF_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h
new file mode 100644
index 0000000000..3405469709
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
+#define SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
+
+#include <memory>
+
+#include "modules/audio_device/include/audio_device.h"
+
+namespace webrtc {
+
// If `bypass_voice_processing` is true, WebRTC will attempt to disable hardware
// audio processing on iOS.
// Warning: Setting `bypass_voice_processing` will have unpredictable
// consequences for the audio path in the device. It is not advisable to use in
// most scenarios.
// Returns nullptr on platforms other than iOS (see the .mm implementation).
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
    bool bypass_voice_processing = false);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_AUDIO_DEVICE_MODULE_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm
new file mode 100644
index 0000000000..4e7b681e69
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/audio_device_module.mm
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_module.h"
+
+#include "api/make_ref_counted.h"
+#include "rtc_base/logging.h"
+
+#include "sdk/objc/native/src/audio/audio_device_module_ios.h"
+
+namespace webrtc {
+
// Creates the audio device module. On iOS this returns an
// ios_adm::AudioDeviceModuleIOS; on all other platforms it logs an error
// and returns nullptr.
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
  RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_IOS)
  // `bypass_voice_processing` is forwarded to the ADM; see the header for
  // the warning about its consequences.
  return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
#else
  RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
  return nullptr;
#endif
}
+}
diff --git a/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h
new file mode 100644
index 0000000000..903c66893d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
+
+#include <memory>
+
+#include "rtc_base/network_monitor_factory.h"
+
+namespace webrtc {
+
// Creates the platform network monitor factory. Returns nullptr on
// platforms other than iOS (see the .mm implementation).
std::unique_ptr<rtc::NetworkMonitorFactory> CreateNetworkMonitorFactory();
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm
new file mode 100644
index 0000000000..acde634b1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/network_monitor_factory.mm
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "network_monitor_factory.h"
+
+#if defined(WEBRTC_IOS)
+#include "sdk/objc/native/src/objc_network_monitor.h"
+#endif
+
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::NetworkMonitorFactory> CreateNetworkMonitorFactory() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+#if defined(WEBRTC_IOS)
+ return std::make_unique<ObjCNetworkMonitorFactory>();
+#else
+ return nullptr;
+#endif
+}
+
+}
diff --git a/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h
new file mode 100644
index 0000000000..35ab1be9a8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
+#define SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
+
+#include <memory>
+
+#import "RTCSSLCertificateVerifier.h"
+#include "rtc_base/ssl_certificate.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::SSLCertificateVerifier> ObjCToNativeCertificateVerifier(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_SSL_CERTIFICATE_VERIFIER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm
new file mode 100644
index 0000000000..4437402b9c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/ssl_certificate_verifier.mm
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "ssl_certificate_verifier.h"
+
+#include "rtc_base/buffer.h"
+
+namespace {
+
+class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier {
+ public:
+ SSLCertificateVerifierAdapter(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier)
+ : objc_certificate_verifier_(objc_certificate_verifier) {
+ RTC_DCHECK(objc_certificate_verifier_ != nil);
+ }
+
+ bool Verify(const rtc::SSLCertificate& certificate) override {
+ @autoreleasepool {
+ rtc::Buffer der_buffer;
+ certificate.ToDER(&der_buffer);
+ NSData* serialized_certificate = [[NSData alloc] initWithBytes:der_buffer.data()
+ length:der_buffer.size()];
+ return [objc_certificate_verifier_ verify:serialized_certificate];
+ }
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier_;
+};
+
+}
+
+namespace webrtc {
+
+std::unique_ptr<rtc::SSLCertificateVerifier> ObjCToNativeCertificateVerifier(
+ id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier) {
+ return std::make_unique<SSLCertificateVerifierAdapter>(objc_certificate_verifier);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h
new file mode 100644
index 0000000000..9847d8148b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
+
+#import "base/RTCVideoCapturer.h"
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
+ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_CAPTURER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm
new file mode 100644
index 0000000000..a7260ab802
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_capturer.mm
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_capturer.h"
+
+#include "absl/memory/memory.h"
+#include "api/video_track_source_proxy_factory.h"
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
+ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
+ rtc::Thread *signaling_thread,
+ rtc::Thread *worker_thread) {
+ RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
+ webrtc::CreateVideoTrackSourceProxy(
+ signaling_thread, worker_thread, objc_video_track_source.get());
+
+ objc_video_capturer.delegate = adapter;
+
+ return video_source;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h
new file mode 100644
index 0000000000..03d8af3cfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
+
+#include <memory>
+
+#import "base/RTCVideoDecoderFactory.h"
+
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm
new file mode 100644
index 0000000000..d418f2fe6f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_decoder_factory.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_decoder_factory.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory) {
+ return std::make_unique<ObjCVideoDecoderFactory>(objc_video_decoder_factory);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h
new file mode 100644
index 0000000000..6e551b288d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
+
+#include <memory>
+
+#import "base/RTCVideoEncoderFactory.h"
+
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm
new file mode 100644
index 0000000000..6fa5563f75
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_encoder_factory.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_encoder_factory.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory) {
+ return std::make_unique<ObjCVideoEncoderFactory>(objc_video_encoder_factory);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame.h b/third_party/libwebrtc/sdk/objc/native/api/video_frame.h
new file mode 100644
index 0000000000..b4416ffabe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
+
+#import "base/RTCVideoFrame.h"
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm b/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm
new file mode 100644
index 0000000000..b82994fd5f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame.mm
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_frame.h"
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame) {
+ return ToObjCVideoFrame(frame);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h
new file mode 100644
index 0000000000..204d65d850
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
+
+#import "base/RTCVideoFrameBuffer.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/include/video_frame_buffer.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer);
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_FRAME_BUFFER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm
new file mode 100644
index 0000000000..4fe9037bce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_frame_buffer.mm
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_frame_buffer.h"
+
+#include "api/make_ref_counted.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
+ return rtc::make_ref_counted<ObjCFrameBuffer>(objc_video_frame_buffer);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer> &buffer) {
+ return ToObjCVideoFrameBuffer(buffer);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h
new file mode 100644
index 0000000000..04796b8049
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
+#define SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
+
+#import "base/RTCVideoRenderer.h"
+
+#include <memory>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_API_VIDEO_RENDERER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm
new file mode 100644
index 0000000000..e92d47d1e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/api/video_renderer.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/api/video_renderer.h"
+
+#include <memory>
+
+#include "sdk/objc/native/src/objc_video_renderer.h"
+
+namespace webrtc {
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer) {
+ return std::make_unique<ObjCVideoRenderer>(objc_video_renderer);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h
new file mode 100644
index 0000000000..dc9f462063
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.h
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
+
+#include <atomic>
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "audio_session_observer.h"
+#include "modules/audio_device/audio_device_generic.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/objc/base/RTCMacros.h"
+#include "voice_processing_audio_unit.h"
+
+RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter);
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace ios_adm {
+
+// Implements full duplex 16-bit mono PCM audio support for iOS using a
+// Voice-Processing (VP) I/O audio unit in Core Audio. The VP I/O audio unit
+// supports audio echo cancellation. It also adds automatic gain control,
+// adjustment of voice-processing quality and muting.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All supported public methods must also be called on the same thread.
+// A thread checker will RTC_DCHECK if any supported method is called on an
+// invalid thread.
+//
+// Recorded audio will be delivered on a real-time internal I/O thread in the
+// audio unit. The audio unit will also ask for audio data to play out on this
+// same thread.
+class AudioDeviceIOS : public AudioDeviceGeneric,
+ public AudioSessionObserver,
+ public VoiceProcessingAudioUnitObserver,
+ public rtc::MessageHandler {
+ public:
+ explicit AudioDeviceIOS(bool bypass_voice_processing);
+ ~AudioDeviceIOS() override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ InitStatus Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ // These methods return hard-coded delay values and not dynamic delay
+ // estimates. The reason is that iOS supports a built-in AEC and the WebRTC
+ // AEC will always be disabled in the Libjingle layer to avoid running two
+ // AEC implementations at the same time. And, it saves resources to avoid
+ // updating these delay values continuously.
+ // TODO(henrika): it would be possible to mark these two methods as not
+ // implemented since they are only called for A/V-sync purposes today and
+ // A/V-sync is not supported on iOS. However, we avoid adding error messages
+ // to the log by using these dummy implementations instead.
+ int32_t PlayoutDelay(uint16_t& delayMS) const override;
+
+ // No implementation for playout underrun on iOS. We override it to avoid a
+ // periodic log that it isn't available from the base class.
+ int32_t GetPlayoutUnderrunCount() const override { return -1; }
+
+ // Native audio parameters stored during construction.
+ // These methods are unique for the iOS implementation.
+ int GetPlayoutAudioParameters(AudioParameters* params) const override;
+ int GetRecordAudioParameters(AudioParameters* params) const override;
+
+ // These methods are currently not fully implemented on iOS:
+
+ // See audio_device_not_implemented.cc for trivial implementations.
+ int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const override;
+ int32_t PlayoutIsAvailable(bool& available) override;
+ int32_t RecordingIsAvailable(bool& available) override;
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) override;
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+ int32_t SpeakerVolumeIsAvailable(bool& available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t& volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override;
+ int32_t MinSpeakerVolume(uint32_t& minVolume) const override;
+ int32_t MicrophoneVolumeIsAvailable(bool& available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume) override;
+ int32_t MicrophoneVolume(uint32_t& volume) const override;
+ int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override;
+ int32_t MinMicrophoneVolume(uint32_t& minVolume) const override;
+ int32_t MicrophoneMuteIsAvailable(bool& available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool& enabled) const override;
+ int32_t SpeakerMuteIsAvailable(bool& available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool& enabled) const override;
+ int32_t StereoPlayoutIsAvailable(bool& available) override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool& enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool& available) override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool& enabled) const override;
+
+ // AudioSessionObserver methods. May be called from any thread.
+ void OnInterruptionBegin() override;
+ void OnInterruptionEnd() override;
+ void OnValidRouteChange() override;
+ void OnCanPlayOrRecordChange(bool can_play_or_record) override;
+ void OnChangedOutputVolume() override;
+
+ // VoiceProcessingAudioUnitObserver methods.
+ OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) override;
+ OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) override;
+
+ // Handles messages from posts.
+ void OnMessage(rtc::Message* msg) override;
+
+ bool IsInterrupted();
+
+ private:
+ // Called by the relevant AudioSessionObserver methods on `thread_`.
+ void HandleInterruptionBegin();
+ void HandleInterruptionEnd();
+ void HandleValidRouteChange();
+ void HandleCanPlayOrRecordChange(bool can_play_or_record);
+ void HandleSampleRateChange();
+ void HandlePlayoutGlitchDetected();
+ void HandleOutputVolumeChange();
+
+ // Uses current `playout_parameters_` and `record_parameters_` to inform the
+ // audio device buffer (ADB) about our internal audio parameters.
+ void UpdateAudioDeviceBuffer();
+
+ // Since the preferred audio parameters are only hints to the OS, the actual
+ // values may be different once the AVAudioSession has been activated.
+ // This method asks for the current hardware parameters and takes actions
+ // if they should differ from what we have asked for initially. It also
+ // defines `playout_parameters_` and `record_parameters_`.
+ void SetupAudioBuffersForActiveAudioSession();
+
+ // Creates the audio unit.
+ bool CreateAudioUnit();
+
+ // Updates the audio unit state based on current state.
+ void UpdateAudioUnit(bool can_play_or_record);
+
+ // Configures the audio session for WebRTC.
+ bool ConfigureAudioSession();
+
+ // Like above, but requires caller to already hold session lock.
+ bool ConfigureAudioSessionLocked();
+
+ // Unconfigures the audio session.
+ void UnconfigureAudioSession();
+
+ // Activates our audio session, creates and initializes the voice-processing
+ // audio unit and verifies that we got the preferred native audio parameters.
+ bool InitPlayOrRecord();
+
+ // Closes and deletes the voice-processing I/O unit.
+ void ShutdownPlayOrRecord();
+
+ // Resets thread-checkers before a call is restarted.
+ void PrepareForNewStart();
+
+ // Determines whether voice processing should be enabled or disabled.
+ const bool bypass_voice_processing_;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Native I/O audio thread checker.
+ SequenceChecker io_thread_checker_;
+
+ // Thread that this object is created on.
+ rtc::Thread* thread_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
+ // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
+ // and therefore outlives this object.
+ AudioDeviceBuffer* audio_device_buffer_;
+
+ // Contains audio parameters (sample rate, #channels, buffer size etc.) for
+ // the playout and recording sides. These structures are set in two steps:
+ // first, native sample rate and #channels are defined in Init(). Next, the
+ // audio session is activated and we verify that the preferred parameters
+ // were granted by the OS. At this stage it is also possible to add a third
+ // component to the parameters; the native I/O buffer duration.
+ // A RTC_CHECK will be hit if we for some reason fail to open an audio session
+ // using the specified parameters.
+ AudioParameters playout_parameters_;
+ AudioParameters record_parameters_;
+
+ // The AudioUnit used to play and record audio.
+ std::unique_ptr<VoiceProcessingAudioUnit> audio_unit_;
+
+ // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
+ // in chunks of 10ms. It then allows for this data to be pulled in
+ // a finer or coarser granularity. I.e. interacting with this class instead
+ // of directly with the AudioDeviceBuffer one can ask for any number of
+ // audio data samples. It also supports a similar scheme for the recording
+ // side.
+ // Example: native buffer size can be 128 audio frames at 16kHz sample rate.
+ // WebRTC will provide 480 audio frames per 10ms but iOS asks for 128
+ // in each callback (one every 8ms). This class can then ask for 128 and the
+ // FineAudioBuffer will ask WebRTC for new data only when needed and also
+ // cache non-utilized audio between callbacks. On the recording side, iOS
+ // can provide audio data frames of size 128 and these are accumulated until
+ // enough data to supply one 10ms call exists. This 10ms chunk is then sent
+ // to WebRTC and the remaining part is stored.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Temporary storage for recorded data. AudioUnitRender() renders into this
+ // array as soon as a frame of the desired buffer size has been recorded.
+ // On real iOS devices, the size will be fixed and set once. For iOS
+ // simulators, the size can vary from callback to callback and the size
+ // will be changed dynamically to account for this behavior.
+ rtc::BufferT<int16_t> record_audio_buffer_;
+
+ // Set to 1 when recording is active and 0 otherwise.
+ std::atomic<int> recording_;
+
+ // Set to 1 when playout is active and 0 otherwise.
+ std::atomic<int> playing_;
+
+ // Set to true after successful call to Init(), false otherwise.
+ bool initialized_ RTC_GUARDED_BY(thread_checker_);
+
+ // Set to true after successful call to InitRecording() or InitPlayout(),
+ // false otherwise.
+ bool audio_is_initialized_;
+
+ // Set to true if audio session is interrupted, false otherwise.
+ bool is_interrupted_;
+
+ // Audio interruption observer instance.
+ RTCNativeAudioSessionDelegateAdapter* audio_session_observer_
+ RTC_GUARDED_BY(thread_checker_);
+
+ // Set to true if we've activated the audio session.
+ bool has_configured_session_ RTC_GUARDED_BY(thread_checker_);
+
+ // Counts number of detected audio glitches on the playout side.
+ int64_t num_detected_playout_glitches_ RTC_GUARDED_BY(thread_checker_);
+ int64_t last_playout_time_ RTC_GUARDED_BY(io_thread_checker_);
+
+ // Counts number of playout callbacks per call.
+ // The value is updated on the native I/O thread and later read on the
+ // creating thread (see thread_checker_) but at this stage no audio is
+ // active. Hence, it is a "thread safe" design and no lock is needed.
+ int64_t num_playout_callbacks_;
+
+ // Contains the time for when the last output volume change was detected.
+ int64_t last_output_volume_change_time_ RTC_GUARDED_BY(thread_checker_);
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_IOS_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm
new file mode 100644
index 0000000000..f3f87c04b6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_ios.mm
@@ -0,0 +1,1165 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+
+#include "audio_device_ios.h"
+
+#include <cmath>
+
+#include "api/array_view.h"
+#include "helpers.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+
+#import "base/RTCLogging.h"
+#import "components/audio/RTCAudioSession+Private.h"
+#import "components/audio/RTCAudioSession.h"
+#import "components/audio/RTCAudioSessionConfiguration.h"
+#import "components/audio/RTCNativeAudioSessionDelegateAdapter.h"
+
+namespace webrtc {
+namespace ios_adm {
+
+#define LOGI() RTC_LOG(LS_INFO) << "AudioDeviceIOS::"
+
+#define LOG_AND_RETURN_IF_ERROR(error, message) \
+ do { \
+ OSStatus err = error; \
+ if (err) { \
+ RTC_LOG(LS_ERROR) << message << ": " << err; \
+ return false; \
+ } \
+ } while (0)
+
+#define LOG_IF_ERROR(error, message) \
+ do { \
+ OSStatus err = error; \
+ if (err) { \
+ RTC_LOG(LS_ERROR) << message << ": " << err; \
+ } \
+ } while (0)
+
+// Hardcoded delay estimates based on real measurements.
+// TODO(henrika): these values are not used in combination with built-in AEC.
+// Can most likely be removed.
+const UInt16 kFixedPlayoutDelayEstimate = 30;
+const UInt16 kFixedRecordDelayEstimate = 30;
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageTypeInterruptionBegin,
+ kMessageTypeInterruptionEnd,
+ kMessageTypeValidRouteChange,
+ kMessageTypeCanPlayOrRecordChange,
+ kMessageTypePlayoutGlitchDetected,
+ kMessageOutputVolumeChange,
+};
+
+using ios::CheckAndLogError;
+
+#if !defined(NDEBUG)
+// Returns true when the code runs on a device simulator.
+static bool DeviceIsSimulator() {
+ return ios::GetDeviceName() == "x86_64";
+}
+
+// Helper method that logs essential device information strings.
+static void LogDeviceInfo() {
+ RTC_LOG(LS_INFO) << "LogDeviceInfo";
+ @autoreleasepool {
+ RTC_LOG(LS_INFO) << " system name: " << ios::GetSystemName();
+ RTC_LOG(LS_INFO) << " system version: " << ios::GetSystemVersionAsString();
+ RTC_LOG(LS_INFO) << " device type: " << ios::GetDeviceType();
+ RTC_LOG(LS_INFO) << " device name: " << ios::GetDeviceName();
+ RTC_LOG(LS_INFO) << " process name: " << ios::GetProcessName();
+ RTC_LOG(LS_INFO) << " process ID: " << ios::GetProcessID();
+ RTC_LOG(LS_INFO) << " OS version: " << ios::GetOSVersionString();
+ RTC_LOG(LS_INFO) << " processing cores: " << ios::GetProcessorCount();
+ RTC_LOG(LS_INFO) << " low power mode: " << ios::GetLowPowerModeEnabled();
+#if TARGET_IPHONE_SIMULATOR
+ RTC_LOG(LS_INFO) << " TARGET_IPHONE_SIMULATOR is defined";
+#endif
+ RTC_LOG(LS_INFO) << " DeviceIsSimulator: " << DeviceIsSimulator();
+ }
+}
+#endif // !defined(NDEBUG)
+
+AudioDeviceIOS::AudioDeviceIOS(bool bypass_voice_processing)
+ : bypass_voice_processing_(bypass_voice_processing),
+ audio_device_buffer_(nullptr),
+ audio_unit_(nullptr),
+ recording_(0),
+ playing_(0),
+ initialized_(false),
+ audio_is_initialized_(false),
+ is_interrupted_(false),
+ has_configured_session_(false),
+ num_detected_playout_glitches_(0),
+ last_playout_time_(0),
+ num_playout_callbacks_(0),
+ last_output_volume_change_time_(0) {
+ LOGI() << "ctor" << ios::GetCurrentThreadDescription()
+ << ",bypass_voice_processing=" << bypass_voice_processing_;
+ io_thread_checker_.Detach();
+ thread_checker_.Detach();
+ thread_ = rtc::Thread::Current();
+
+ audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this];
+}
+
+AudioDeviceIOS::~AudioDeviceIOS() {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
+ thread_->Clear(this);
+ Terminate();
+ audio_session_observer_ = nil;
+}
+
+void AudioDeviceIOS::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ LOGI() << "AttachAudioBuffer";
+ RTC_DCHECK(audioBuffer);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+}
+
+AudioDeviceGeneric::InitStatus AudioDeviceIOS::Init() {
+ LOGI() << "Init";
+ io_thread_checker_.Detach();
+ thread_checker_.Detach();
+
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (initialized_) {
+ return InitStatus::OK;
+ }
+#if !defined(NDEBUG)
+ LogDeviceInfo();
+#endif
+ // Store the preferred sample rate and preferred number of channels already
+ // here. They have not been set and confirmed yet since configureForWebRTC
+ // is not called until audio is about to start. However, it makes sense to
+ // store the parameters now and then verify at a later stage.
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
+ record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
+ // Ensure that the audio device buffer (ADB) knows about the internal audio
+ // parameters. Note that, even if we are unable to get a mono audio session,
+ // we will always tell the I/O audio unit to do a channel format conversion
+ // to guarantee mono on the "input side" of the audio unit.
+ UpdateAudioDeviceBuffer();
+ initialized_ = true;
+ return InitStatus::OK;
+}
+
+int32_t AudioDeviceIOS::Terminate() {
+ LOGI() << "Terminate";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!initialized_) {
+ return 0;
+ }
+ StopPlayout();
+ StopRecording();
+ initialized_ = false;
+ return 0;
+}
+
+bool AudioDeviceIOS::Initialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return initialized_;
+}
+
+int32_t AudioDeviceIOS::InitPlayout() {
+ LOGI() << "InitPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!audio_is_initialized_);
+ RTC_DCHECK(!playing_.load());
+ if (!audio_is_initialized_) {
+ if (!InitPlayOrRecord()) {
+ RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!";
+ return -1;
+ }
+ }
+ audio_is_initialized_ = true;
+ return 0;
+}
+
+bool AudioDeviceIOS::PlayoutIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return audio_is_initialized_;
+}
+
+bool AudioDeviceIOS::RecordingIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return audio_is_initialized_;
+}
+
+int32_t AudioDeviceIOS::InitRecording() {
+ LOGI() << "InitRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!audio_is_initialized_);
+ RTC_DCHECK(!recording_.load());
+ if (!audio_is_initialized_) {
+ if (!InitPlayOrRecord()) {
+ RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!";
+ return -1;
+ }
+ }
+ audio_is_initialized_ = true;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StartPlayout() {
+ LOGI() << "StartPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(audio_is_initialized_);
+ RTC_DCHECK(!playing_.load());
+ RTC_DCHECK(audio_unit_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ if (!recording_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"StartPlayout failed to start audio unit, reason %d", result);
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
+ }
+ playing_.store(1, std::memory_order_release);
+ num_playout_callbacks_ = 0;
+ num_detected_playout_glitches_ = 0;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StopPlayout() {
+ LOGI() << "StopPlayout";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!audio_is_initialized_ || !playing_.load()) {
+ return 0;
+ }
+ if (!recording_.load()) {
+ ShutdownPlayOrRecord();
+ audio_is_initialized_ = false;
+ }
+ playing_.store(0, std::memory_order_release);
+
+ // Derive average number of calls to OnGetPlayoutData() between detected
+ // audio glitches and add the result to a histogram.
+ int average_number_of_playout_callbacks_between_glitches = 100000;
+ RTC_DCHECK_GE(num_playout_callbacks_, num_detected_playout_glitches_);
+ if (num_detected_playout_glitches_ > 0) {
+ average_number_of_playout_callbacks_between_glitches =
+ num_playout_callbacks_ / num_detected_playout_glitches_;
+ }
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches",
+ average_number_of_playout_callbacks_between_glitches);
+ RTCLog(@"Average number of playout callbacks between glitches: %d",
+ average_number_of_playout_callbacks_between_glitches);
+ return 0;
+}
+
+bool AudioDeviceIOS::Playing() const {
+ return playing_.load();
+}
+
+int32_t AudioDeviceIOS::StartRecording() {
+ LOGI() << "StartRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(audio_is_initialized_);
+ RTC_DCHECK(!recording_.load());
+ RTC_DCHECK(audio_unit_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetRecord();
+ }
+ if (!playing_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"StartRecording failed to start audio unit, reason %d", result);
+ return -1;
+ }
+ RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
+ }
+ recording_.store(1, std::memory_order_release);
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StopRecording() {
+ LOGI() << "StopRecording";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ if (!audio_is_initialized_ || !recording_.load()) {
+ return 0;
+ }
+ if (!playing_.load()) {
+ ShutdownPlayOrRecord();
+ audio_is_initialized_ = false;
+ }
+ recording_.store(0, std::memory_order_release);
+ return 0;
+}
+
+bool AudioDeviceIOS::Recording() const {
+ return recording_.load();
+}
+
+int32_t AudioDeviceIOS::PlayoutDelay(uint16_t& delayMS) const {
+ delayMS = kFixedPlayoutDelayEstimate;
+ return 0;
+}
+
+int AudioDeviceIOS::GetPlayoutAudioParameters(AudioParameters* params) const {
+ LOGI() << "GetPlayoutAudioParameters";
+ RTC_DCHECK(playout_parameters_.is_valid());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ *params = playout_parameters_;
+ return 0;
+}
+
+int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
+ LOGI() << "GetRecordAudioParameters";
+ RTC_DCHECK(record_parameters_.is_valid());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ *params = record_parameters_;
+ return 0;
+}
+
+void AudioDeviceIOS::OnInterruptionBegin() {
+ RTC_DCHECK(thread_);
+ LOGI() << "OnInterruptionBegin";
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeInterruptionBegin);
+}
+
+void AudioDeviceIOS::OnInterruptionEnd() {
+ RTC_DCHECK(thread_);
+ LOGI() << "OnInterruptionEnd";
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeInterruptionEnd);
+}
+
+void AudioDeviceIOS::OnValidRouteChange() {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypeValidRouteChange);
+}
+
+void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE,
+ this,
+ kMessageTypeCanPlayOrRecordChange,
+ new rtc::TypedMessageData<bool>(can_play_or_record));
+}
+
+void AudioDeviceIOS::OnChangedOutputVolume() {
+ RTC_DCHECK(thread_);
+ thread_->Post(RTC_FROM_HERE, this, kMessageOutputVolumeChange);
+}
+
+OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* /* io_data */) {
+ RTC_DCHECK_RUN_ON(&io_thread_checker_);
+ OSStatus result = noErr;
+ // Simply return if recording is not enabled.
+ if (!recording_.load(std::memory_order_acquire)) return result;
+
+ // Set the size of our own audio buffer and clear it first to avoid copying
+ // in combination with potential reallocations.
+ // On real iOS devices, the size will only be set once (at first callback).
+ record_audio_buffer_.Clear();
+ record_audio_buffer_.SetSize(num_frames);
+
+ // Allocate AudioBuffers to be used as storage for the received audio.
+ // The AudioBufferList structure works as a placeholder for the
+ // AudioBuffer structure, which holds a pointer to the actual data buffer
+ // in `record_audio_buffer_`. Recorded audio will be rendered into this memory
+ // at each input callback when calling AudioUnitRender().
+ AudioBufferList audio_buffer_list;
+ audio_buffer_list.mNumberBuffers = 1;
+ AudioBuffer* audio_buffer = &audio_buffer_list.mBuffers[0];
+ audio_buffer->mNumberChannels = record_parameters_.channels();
+ audio_buffer->mDataByteSize =
+ record_audio_buffer_.size() * VoiceProcessingAudioUnit::kBytesPerSample;
+ audio_buffer->mData = reinterpret_cast<int8_t*>(record_audio_buffer_.data());
+
+ // Obtain the recorded audio samples by initiating a rendering cycle.
+ // Since it happens on the input bus, the `io_data` parameter is a reference
+ // to the preallocated audio buffer list that the audio unit renders into.
+ // We can make the audio unit provide a buffer instead in io_data, but we
+ // currently just use our own.
+ // TODO(henrika): should error handling be improved?
+ result = audio_unit_->Render(flags, time_stamp, bus_number, num_frames, &audio_buffer_list);
+ if (result != noErr) {
+ RTCLogError(@"Failed to render audio.");
+ return result;
+ }
+
+ // Get a pointer to the recorded audio and send it to the WebRTC ADB.
+ // Use the FineAudioBuffer instance to convert between native buffer size
+ // and the 10ms buffer size used by WebRTC.
+ fine_audio_buffer_->DeliverRecordedData(record_audio_buffer_, kFixedRecordDelayEstimate);
+ return noErr;
+}
+
+OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ RTC_DCHECK_RUN_ON(&io_thread_checker_);
+ // Verify 16-bit, noninterleaved mono PCM signal format.
+ RTC_DCHECK_EQ(1, io_data->mNumberBuffers);
+ AudioBuffer* audio_buffer = &io_data->mBuffers[0];
+ RTC_DCHECK_EQ(1, audio_buffer->mNumberChannels);
+
+ // Produce silence and give audio unit a hint about it if playout is not
+ // activated.
+ if (!playing_.load(std::memory_order_acquire)) {
+ const size_t size_in_bytes = audio_buffer->mDataByteSize;
+ RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample, num_frames);
+ *flags |= kAudioUnitRenderAction_OutputIsSilence;
+ memset(static_cast<int8_t*>(audio_buffer->mData), 0, size_in_bytes);
+ return noErr;
+ }
+
+ // Measure time since last call to OnGetPlayoutData() and see if it is larger
+ // than a well defined threshold which depends on the current IO buffer size.
+ // If so, we have an indication of a glitch in the output audio since the
+ // core audio layer will most likely run dry in this state.
+ ++num_playout_callbacks_;
+ const int64_t now_time = rtc::TimeMillis();
+ if (time_stamp->mSampleTime != num_frames) {
+ const int64_t delta_time = now_time - last_playout_time_;
+ const int glitch_threshold = 1.6 * playout_parameters_.GetBufferSizeInMilliseconds();
+ if (delta_time > glitch_threshold) {
+ RTCLogWarning(@"Possible playout audio glitch detected.\n"
+ " Time since last OnGetPlayoutData was %lld ms.\n",
+ delta_time);
+ // Exclude extreme delta values since they do most likely not correspond
+ // to a real glitch. Instead, the most probable cause is that a headset
+ // has been plugged in or out. There are more direct ways to detect
+ // audio device changes (see HandleValidRouteChange()) but experiments
+ // show that using it leads to more complex implementations.
+ // TODO(henrika): more tests might be needed to come up with an even
+ // better upper limit.
+ if (glitch_threshold < 120 && delta_time > 120) {
+ RTCLog(@"Glitch warning is ignored. Probably caused by device switch.");
+ } else {
+ thread_->Post(RTC_FROM_HERE, this, kMessageTypePlayoutGlitchDetected);
+ }
+ }
+ }
+ last_playout_time_ = now_time;
+
+ // Read decoded 16-bit PCM samples from WebRTC (using a size that matches
+ // the native I/O audio unit) and copy the result to the audio buffer in the
+ // `io_data` destination.
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::ArrayView<int16_t>(static_cast<int16_t*>(audio_buffer->mData), num_frames),
+ kFixedPlayoutDelayEstimate);
+ return noErr;
+}
+
+void AudioDeviceIOS::OnMessage(rtc::Message* msg) {
+ switch (msg->message_id) {
+ case kMessageTypeInterruptionBegin:
+ HandleInterruptionBegin();
+ break;
+ case kMessageTypeInterruptionEnd:
+ HandleInterruptionEnd();
+ break;
+ case kMessageTypeValidRouteChange:
+ HandleValidRouteChange();
+ break;
+ case kMessageTypeCanPlayOrRecordChange: {
+ rtc::TypedMessageData<bool>* data = static_cast<rtc::TypedMessageData<bool>*>(msg->pdata);
+ HandleCanPlayOrRecordChange(data->data());
+ delete data;
+ break;
+ }
+ case kMessageTypePlayoutGlitchDetected:
+ HandlePlayoutGlitchDetected();
+ break;
+ case kMessageOutputVolumeChange:
+ HandleOutputVolumeChange();
+ break;
+ }
+}
+
+void AudioDeviceIOS::HandleInterruptionBegin() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Interruption begin. IsInterrupted changed from %d to 1.", is_interrupted_);
+ if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ RTCLog(@"Stopping the audio unit due to interruption begin.");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop the audio unit for interruption begin.");
+ }
+ PrepareForNewStart();
+ }
+ is_interrupted_ = true;
+}
+
+void AudioDeviceIOS::HandleInterruptionEnd() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Interruption ended. IsInterrupted changed from %d to 0. "
+ "Updating audio unit state.",
+ is_interrupted_);
+ is_interrupted_ = false;
+ if (!audio_unit_) return;
+ if (webrtc::field_trial::IsEnabled("WebRTC-Audio-iOS-Holding")) {
+ // Work around an issue where audio does not restart properly after an interruption
+ // by restarting the audio unit when the interruption ends.
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ PrepareForNewStart();
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
+ // Allocate new buffers given the potentially new stream format.
+ SetupAudioBuffersForActiveAudioSession();
+ }
+ UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
+}
+
+void AudioDeviceIOS::HandleValidRouteChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ RTCLog(@"%@", session);
+ HandleSampleRateChange();
+}
+
+void AudioDeviceIOS::HandleCanPlayOrRecordChange(bool can_play_or_record) {
+ RTCLog(@"Handling CanPlayOrRecord change to: %d", can_play_or_record);
+ UpdateAudioUnit(can_play_or_record);
+}
+
+void AudioDeviceIOS::HandleSampleRateChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Handling sample rate change.");
+
+ // Don't do anything if we're interrupted.
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring sample rate change due to interruption.");
+ return;
+ }
+
+ // If we don't have an audio unit yet, or the audio unit is uninitialized,
+ // there is no work to do.
+ if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
+ return;
+ }
+
+ // The audio unit is already initialized or started.
+ // Check to see if the sample rate or buffer size has changed.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ const double new_sample_rate = session.sampleRate;
+ const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
+ const size_t new_frames_per_buffer =
+ static_cast<size_t>(new_sample_rate * session_buffer_duration + .5);
+ const double current_sample_rate = playout_parameters_.sample_rate();
+ const size_t current_frames_per_buffer = playout_parameters_.frames_per_buffer();
+ RTCLog(@"Handling playout sample rate change:\n"
+ " Session sample rate: %f frames_per_buffer: %lu\n"
+ " ADM sample rate: %f frames_per_buffer: %lu",
+ new_sample_rate,
+ (unsigned long)new_frames_per_buffer,
+ current_sample_rate,
+ (unsigned long)current_frames_per_buffer);
+
+ // Sample rate and buffer size are the same, no work to do.
+ if (std::abs(current_sample_rate - new_sample_rate) <= DBL_EPSILON &&
+ current_frames_per_buffer == new_frames_per_buffer) {
+ RTCLog(@"Ignoring sample rate change since audio parameters are intact.");
+ return;
+ }
+
+ // Extra sanity check to ensure that the new sample rate is valid.
+ if (new_sample_rate <= 0.0) {
+ RTCLogError(@"Sample rate is invalid: %f", new_sample_rate);
+ return;
+ }
+
+ // We need to adjust our format and buffer sizes.
+ // The stream format is about to be changed and it requires that we first
+ // stop and uninitialize the audio unit to deallocate its resources.
+ RTCLog(@"Stopping and uninitializing audio unit to adjust buffers.");
+ bool restart_audio_unit = false;
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ restart_audio_unit = true;
+ PrepareForNewStart();
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
+
+ // Allocate new buffers given the new stream format.
+ SetupAudioBuffersForActiveAudioSession();
+
+ // Initialize the audio unit again with the new sample rate.
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize the audio unit with sample rate: %d",
+ playout_parameters_.sample_rate());
+ return;
+ }
+
+ // Restart the audio unit if it was already running.
+ if (restart_audio_unit) {
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"Failed to start audio unit with sample rate: %d, reason %d",
+ playout_parameters_.sample_rate(),
+ result);
+ return;
+ }
+ }
+ RTCLog(@"Successfully handled sample rate change.");
+}
+
+void AudioDeviceIOS::HandlePlayoutGlitchDetected() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ // Don't update metrics if we're interrupted since a "glitch" is expected
+ // in this state.
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio glitch due to interruption.");
+ return;
+ }
+ // Avoid doing glitch detection for two seconds after a volume change
+ // has been detected to reduce the risk of false alarm.
+ if (last_output_volume_change_time_ > 0 &&
+ rtc::TimeSince(last_output_volume_change_time_) < 2000) {
+ RTCLog(@"Ignoring audio glitch due to recent output volume change.");
+ return;
+ }
+ num_detected_playout_glitches_++;
+ RTCLog(@"Number of detected playout glitches: %lld", num_detected_playout_glitches_);
+
+ int64_t glitch_count = num_detected_playout_glitches_;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session notifyDidDetectPlayoutGlitch:glitch_count];
+ });
+}
+
+void AudioDeviceIOS::HandleOutputVolumeChange() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Output volume change detected.");
+ // Store time of this detection so it can be used to defer detection of
+ // glitches too close in time to this event.
+ last_output_volume_change_time_ = rtc::TimeMillis();
+}
+
+void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
+ LOGI() << "UpdateAudioDevicebuffer";
+ // AttachAudioBuffer() is called at construction by the main class but check
+ // just in case.
+ RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
+ RTC_DCHECK_GT(playout_parameters_.sample_rate(), 0);
+ RTC_DCHECK_GT(record_parameters_.sample_rate(), 0);
+ RTC_DCHECK_EQ(playout_parameters_.channels(), 1);
+ RTC_DCHECK_EQ(record_parameters_.channels(), 1);
+ // Inform the audio device buffer (ADB) about the new audio format.
+ audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
+ audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
+ audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
+ audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
+}
+
+void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
+ LOGI() << "SetupAudioBuffersForActiveAudioSession";
+ // Verify the current values once the audio session has been activated.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ double sample_rate = session.sampleRate;
+ NSTimeInterval io_buffer_duration = session.IOBufferDuration;
+ RTCLog(@"%@", session);
+
+ // Log a warning message for the case when we are unable to set the preferred
+ // hardware sample rate but continue and use the non-ideal sample rate after
+ // reinitializing the audio parameters. Most BT headsets only support 8kHz or
+ // 16kHz.
+ RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig =
+ [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
+ if (sample_rate != webRTCConfig.sampleRate) {
+ RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate";
+ }
+
+ // Crash reports indicates that it can happen in rare cases that the reported
+ // sample rate is less than or equal to zero. If that happens and if a valid
+ // sample rate has already been set during initialization, the best guess we
+ // can do is to reuse the current sample rate.
+ if (sample_rate <= DBL_EPSILON && playout_parameters_.sample_rate() > 0) {
+ RTCLogError(@"Reported rate is invalid: %f. "
+ "Using %d as sample rate instead.",
+ sample_rate, playout_parameters_.sample_rate());
+ sample_rate = playout_parameters_.sample_rate();
+ }
+
+ // At this stage, we also know the exact IO buffer duration and can add
+ // that info to the existing audio parameters where it is converted into
+ // number of audio frames.
+ // Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
+ // Hence, 128 is the size we expect to see in upcoming render callbacks.
+ playout_parameters_.reset(sample_rate, playout_parameters_.channels(), io_buffer_duration);
+ RTC_DCHECK(playout_parameters_.is_complete());
+ record_parameters_.reset(sample_rate, record_parameters_.channels(), io_buffer_duration);
+ RTC_DCHECK(record_parameters_.is_complete());
+ RTC_LOG(LS_INFO) << " frames per I/O buffer: " << playout_parameters_.frames_per_buffer();
+ RTC_LOG(LS_INFO) << " bytes per I/O buffer: " << playout_parameters_.GetBytesPerBuffer();
+ RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), record_parameters_.GetBytesPerBuffer());
+
+ // Update the ADB parameters since the sample rate might have changed.
+ UpdateAudioDeviceBuffer();
+
+ // Create a modified audio buffer class which allows us to ask for,
+ // or deliver, any number of samples (and not only multiple of 10ms) to match
+ // the native audio unit buffer size.
+ RTC_DCHECK(audio_device_buffer_);
+ fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_));
+}
+
+bool AudioDeviceIOS::CreateAudioUnit() {
+ RTC_DCHECK(!audio_unit_);
+
+ audio_unit_.reset(new VoiceProcessingAudioUnit(bypass_voice_processing_, this));
+ if (!audio_unit_->Init()) {
+ audio_unit_.reset();
+ return false;
+ }
+
+ return true;
+}
+
+void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Updating audio unit state. CanPlayOrRecord=%d IsInterrupted=%d",
+ can_play_or_record,
+ is_interrupted_);
+
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio unit update due to interruption.");
+ return;
+ }
+
+ // If we're not initialized we don't need to do anything. Audio unit will
+ // be initialized on initialization.
+ if (!audio_is_initialized_) return;
+
+ // If we're initialized, we must have an audio unit.
+ RTC_DCHECK(audio_unit_);
+
+ bool should_initialize_audio_unit = false;
+ bool should_uninitialize_audio_unit = false;
+ bool should_start_audio_unit = false;
+ bool should_stop_audio_unit = false;
+
+ switch (audio_unit_->GetState()) {
+ case VoiceProcessingAudioUnit::kInitRequired:
+ RTCLog(@"VPAU state: InitRequired");
+ RTC_DCHECK_NOTREACHED();
+ break;
+ case VoiceProcessingAudioUnit::kUninitialized:
+ RTCLog(@"VPAU state: Uninitialized");
+ should_initialize_audio_unit = can_play_or_record;
+ should_start_audio_unit =
+ should_initialize_audio_unit && (playing_.load() || recording_.load());
+ break;
+ case VoiceProcessingAudioUnit::kInitialized:
+ RTCLog(@"VPAU state: Initialized");
+ should_start_audio_unit = can_play_or_record && (playing_.load() || recording_.load());
+ should_uninitialize_audio_unit = !can_play_or_record;
+ break;
+ case VoiceProcessingAudioUnit::kStarted:
+ RTCLog(@"VPAU state: Started");
+ RTC_DCHECK(playing_.load() || recording_.load());
+ should_stop_audio_unit = !can_play_or_record;
+ should_uninitialize_audio_unit = should_stop_audio_unit;
+ break;
+ }
+
+ if (should_initialize_audio_unit) {
+ RTCLog(@"Initializing audio unit for UpdateAudioUnit");
+ ConfigureAudioSession();
+ SetupAudioBuffersForActiveAudioSession();
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize audio unit.");
+ return;
+ }
+ }
+
+ if (should_start_audio_unit) {
+ RTCLog(@"Starting audio unit for UpdateAudioUnit");
+ // Log session settings before trying to start audio streaming.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ RTCLog(@"%@", session);
+ OSStatus result = audio_unit_->Start();
+ if (result != noErr) {
+ [session notifyAudioUnitStartFailedWithError:result];
+ RTCLogError(@"Failed to start audio unit, reason %d", result);
+ return;
+ }
+ }
+
+ if (should_stop_audio_unit) {
+ RTCLog(@"Stopping audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop audio unit.");
+ PrepareForNewStart();
+ return;
+ }
+ PrepareForNewStart();
+ }
+
+ if (should_uninitialize_audio_unit) {
+ RTCLog(@"Uninitializing audio unit for UpdateAudioUnit");
+ audio_unit_->Uninitialize();
+ UnconfigureAudioSession();
+ }
+}
+
+bool AudioDeviceIOS::ConfigureAudioSession() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return false;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ bool success = [session configureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ if (success) {
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+ } else {
+ RTCLog(@"Failed to configure audio session.");
+ }
+ return success;
+}
+
+bool AudioDeviceIOS::ConfigureAudioSessionLocked() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return false;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ bool success = [session configureWebRTCSession:nil];
+ if (success) {
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+ } else {
+ RTCLog(@"Failed to configure audio session.");
+ }
+ return success;
+}
+
+// Reverts the WebRTC configuration of the shared RTCAudioSession and ends the
+// WebRTC session. No-op (with a warning) if the session was never configured.
+// Takes the session configuration lock internally.
+void AudioDeviceIOS::UnconfigureAudioSession() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTCLog(@"Unconfiguring audio session.");
+ if (!has_configured_session_) {
+ RTCLogWarning(@"Audio session already unconfigured.");
+ return;
+ }
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session lockForConfiguration];
+ [session unconfigureWebRTCSession:nil];
+ [session endWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = false;
+ RTCLog(@"Unconfigured audio session.");
+}
+
+// One-time setup shared by InitPlayout/InitRecording: creates the audio unit,
+// subscribes to RTCAudioSession events, begins the WebRTC session and — if the
+// session can already play or record — configures the session, sizes the audio
+// buffers and initializes the audio unit. Returns false (and resets the audio
+// unit) on any failure.
+bool AudioDeviceIOS::InitPlayOrRecord() {
+ LOGI() << "InitPlayOrRecord";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ // There should be no audio unit at this point.
+ if (!CreateAudioUnit()) {
+ return false;
+ }
+
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ // Subscribe to audio session events.
+ [session pushDelegate:audio_session_observer_];
+ is_interrupted_ = session.isInterrupted ? true : false;
+
+ // Lock the session to make configuration changes.
+ [session lockForConfiguration];
+ NSError* error = nil;
+ if (![session beginWebRTCSession:&error]) {
+ [session unlockForConfiguration];
+ RTCLogError(@"Failed to begin WebRTC session: %@", error.localizedDescription);
+ audio_unit_.reset();
+ return false;
+ }
+
+ // If we are ready to play or record, and if the audio session can be
+ // configured, then initialize the audio unit.
+ if (session.canPlayOrRecord) {
+ if (!ConfigureAudioSessionLocked()) {
+ // One possible reason for failure is if an attempt was made to use the
+ // audio session during or after a Media Services failure.
+ // See AVAudioSessionErrorCodeMediaServicesFailed for details.
+ [session unlockForConfiguration];
+ audio_unit_.reset();
+ return false;
+ }
+ SetupAudioBuffersForActiveAudioSession();
+ audio_unit_->Initialize(playout_parameters_.sample_rate());
+ }
+
+ // Release the lock.
+ [session unlockForConfiguration];
+ return true;
+}
+
+// Tears down everything InitPlayOrRecord set up: stops and destroys the audio
+// unit, detaches the I/O thread checker, unsubscribes from session events and
+// deactivates the audio session.
+// NOTE(review): audio_unit_->Stop() is called unconditionally — assumes the
+// audio unit still exists here (i.e. callers pair this with a successful
+// InitPlayOrRecord); confirm no call path reaches this with audio_unit_ null.
+void AudioDeviceIOS::ShutdownPlayOrRecord() {
+ LOGI() << "ShutdownPlayOrRecord";
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+
+ // Stop the audio unit to prevent any additional audio callbacks.
+ audio_unit_->Stop();
+
+ // Close and delete the voice-processing I/O unit.
+ audio_unit_.reset();
+
+ // Detach thread checker for the AURemoteIO::IOThread to ensure that the
+ // next session uses a fresh thread id.
+ io_thread_checker_.Detach();
+
+ // Remove audio session notification observers.
+ RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session removeDelegate:audio_session_observer_];
+
+ // All I/O should be stopped or paused prior to deactivating the audio
+ // session, hence we deactivate as last action.
+ UnconfigureAudioSession();
+}
+
+// Resets per-stream state after the audio unit has been stopped so a
+// subsequent restart (with a new native I/O thread) is accepted.
+void AudioDeviceIOS::PrepareForNewStart() {
+ LOGI() << "PrepareForNewStart";
+ // The audio unit has been stopped and preparations are needed for an upcoming
+ // restart. It will result in audio callbacks from a new native I/O thread
+ // which means that we must detach thread checkers here to be prepared for an
+ // upcoming new audio stream.
+ io_thread_checker_.Detach();
+}
+
+// Returns whether the audio session is currently interrupted, as cached in
+// is_interrupted_ (set in InitPlayOrRecord and, presumably, by session
+// interruption callbacks — the writers are outside this chunk).
+bool AudioDeviceIOS::IsInterrupted() {
+ return is_interrupted_;
+}
+
+#pragma mark - Not Implemented
+
+// Stubs for the AudioDeviceGeneric interface that have no meaningful iOS
+// implementation. Methods returning -1 hit RTC_DCHECK_NOTREACHED() in debug
+// builds; the rest report fixed capabilities: one playout/recording device,
+// mono only, no software volume or mute control.
+
+int32_t AudioDeviceIOS::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const {
+ audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+ return 0;
+}
+
+int16_t AudioDeviceIOS::PlayoutDevices() {
+ // TODO(henrika): improve.
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return (int16_t)1;
+}
+
+int16_t AudioDeviceIOS::RecordingDevices() {
+ // TODO(henrika): improve.
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return (int16_t)1;
+}
+
+int32_t AudioDeviceIOS::InitSpeaker() {
+ return 0;
+}
+
+bool AudioDeviceIOS::SpeakerIsInitialized() const {
+ return true;
+}
+
+int32_t AudioDeviceIOS::SpeakerVolumeIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetSpeakerVolume(uint32_t volume) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerVolume(uint32_t& volume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MinSpeakerVolume(uint32_t& minVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerMuteIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetSpeakerMute(bool enable) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SpeakerMute(bool& enabled) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SetPlayoutDevice(uint16_t index) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::InitMicrophone() {
+ return 0;
+}
+
+bool AudioDeviceIOS::MicrophoneIsInitialized() const {
+ return true;
+}
+
+int32_t AudioDeviceIOS::MicrophoneMuteIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetMicrophoneMute(bool enable) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MicrophoneMute(bool& enabled) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoRecordingIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetStereoRecording(bool enable) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoRecording(bool& enabled) const {
+ enabled = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::StereoPlayoutIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetStereoPlayout(bool enable) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::StereoPlayout(bool& enabled) const {
+ enabled = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::MicrophoneVolumeIsAvailable(bool& available) {
+ available = false;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetMicrophoneVolume(uint32_t volume) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MicrophoneVolume(uint32_t& volume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::MinMicrophoneVolume(uint32_t& minVolume) const {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::SetRecordingDevice(uint16_t index) {
+ RTC_LOG_F(LS_WARNING) << "Not implemented";
+ return 0;
+}
+
+int32_t AudioDeviceIOS::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType) {
+ RTC_DCHECK_NOTREACHED() << "Not implemented";
+ return -1;
+}
+
+int32_t AudioDeviceIOS::PlayoutIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+int32_t AudioDeviceIOS::RecordingIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+} // namespace ios_adm
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h
new file mode 100644
index 0000000000..9bcf114e32
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
+
+#include <memory>
+
+#include "audio_device_ios.h"
+
+#include "api/task_queue/task_queue_factory.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class AudioDeviceGeneric;
+
+namespace ios_adm {
+
+// iOS implementation of the AudioDeviceModule interface. Thin adapter that
+// translates the pointer-based ADM API into the reference-based AudioDeviceIOS
+// API and owns the shared AudioDeviceBuffer that carries PCM data between the
+// audio unit and the registered AudioTransport callback.
+class AudioDeviceModuleIOS : public AudioDeviceModule {
+ public:
+ // Connects audio_device_buffer_ to audio_device_; called from Init().
+ int32_t AttachAudioBuffer();
+
+ // `bypass_voice_processing` is forwarded to AudioDeviceIOS in Init().
+ explicit AudioDeviceModuleIOS(bool bypass_voice_processing);
+ ~AudioDeviceModuleIOS() override;
+
+ // Retrieve the currently utilized audio layer
+ int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
+
+ // Full-duplex transportation of PCM audio
+ int32_t RegisterAudioCallback(AudioTransport* audioCallback) override;
+
+ // Main initializaton and termination
+ int32_t Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ // Device enumeration
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override;
+
+ // Device selection
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+ // Audio transport initialization
+ int32_t PlayoutIsAvailable(bool* available) override;
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+ int32_t RecordingIsAvailable(bool* available) override;
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ // Audio transport control
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ // Audio mixer initialization
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+
+ // Speaker volume controls
+ int32_t SpeakerVolumeIsAvailable(bool* available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t* volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override;
+ int32_t MinSpeakerVolume(uint32_t* minVolume) const override;
+
+ // Microphone volume controls
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const override;
+ int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override;
+ int32_t MinMicrophoneVolume(uint32_t* minVolume) const override;
+
+ // Speaker mute control
+ int32_t SpeakerMuteIsAvailable(bool* available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool* enabled) const override;
+
+ // Microphone mute control
+ int32_t MicrophoneMuteIsAvailable(bool* available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool* enabled) const override;
+
+ // Stereo support
+ int32_t StereoPlayoutIsAvailable(bool* available) const override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool* enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool* available) const override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool* enabled) const override;
+
+ // Delay information and control
+ int32_t PlayoutDelay(uint16_t* delayMS) const override;
+
+ bool BuiltInAECIsAvailable() const override;
+ int32_t EnableBuiltInAEC(bool enable) override;
+ bool BuiltInAGCIsAvailable() const override;
+ int32_t EnableBuiltInAGC(bool enable) override;
+ bool BuiltInNSIsAvailable() const override;
+ int32_t EnableBuiltInNS(bool enable) override;
+
+ int32_t GetPlayoutUnderrunCount() const override;
+
+#if defined(WEBRTC_IOS)
+ int GetPlayoutAudioParameters(AudioParameters* params) const override;
+ int GetRecordAudioParameters(AudioParameters* params) const override;
+#endif // WEBRTC_IOS
+ private:
+ // Captured at construction; passed to AudioDeviceIOS in Init().
+ const bool bypass_voice_processing_;
+ // Set by Init()/Terminate(); gates most methods via CHECKinitialized_().
+ bool initialized_ = false;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ // Created lazily in Init(); both reset there on every (re)initialization.
+ std::unique_ptr<AudioDeviceIOS> audio_device_;
+ std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_DEVICE_MODULE_IOS_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm
new file mode 100644
index 0000000000..5effef3abd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_device_module_ios.mm
@@ -0,0 +1,669 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_module_ios.h"
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "modules/audio_device/audio_device_config.h"
+#include "modules/audio_device/audio_device_generic.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_count.h"
+#include "system_wrappers/include/metrics.h"
+
+#if defined(WEBRTC_IOS)
+#include "audio_device_ios.h"
+#endif
+
+// Early-out guard used by most methods below: returns -1 from the enclosing
+// function when Init() has not (successfully) run yet.
+#define CHECKinitialized_() \
+ { \
+ if (!initialized_) { \
+ return -1; \
+ }; \
+ }
+
+// Variant of CHECKinitialized_() for bool-returning methods: returns false
+// instead of -1 when the module is not initialized.
+#define CHECKinitialized__BOOL() \
+ { \
+ if (!initialized_) { \
+ return false; \
+ }; \
+ }
+
+namespace webrtc {
+namespace ios_adm {
+
+// Stores the voice-processing bypass flag and creates the task queue factory;
+// the actual audio objects are created lazily in Init().
+AudioDeviceModuleIOS::AudioDeviceModuleIOS(bool bypass_voice_processing)
+ : bypass_voice_processing_(bypass_voice_processing),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()) {
+ RTC_LOG(LS_INFO) << "current platform is IOS";
+ RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized.";
+}
+
+ // Hands the shared AudioDeviceBuffer to the AudioDeviceIOS instance.
+ // Precondition: Init() has created both objects (only caller is Init()).
+ int32_t AudioDeviceModuleIOS::AttachAudioBuffer() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ audio_device_->AttachAudioBuffer(audio_device_buffer_.get());
+ return 0;
+ }
+
+ AudioDeviceModuleIOS::~AudioDeviceModuleIOS() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ }
+
+ // Reports the active audio layer via out-parameter; AudioDeviceIOS always
+ // reports kPlatformDefaultAudio (see its stub implementation).
+ int32_t AudioDeviceModuleIOS::ActiveAudioLayer(AudioLayer* audioLayer) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ AudioLayer activeAudio;
+ if (audio_device_->ActiveAudioLayer(activeAudio) == -1) {
+ return -1;
+ }
+ *audioLayer = activeAudio;
+ return 0;
+ }
+
+ // Creates the audio buffer and AudioDeviceIOS, wires them together and
+ // initializes the device. Idempotent: returns 0 immediately if already
+ // initialized. Records the init result in a UMA histogram.
+ int32_t AudioDeviceModuleIOS::Init() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (initialized_)
+ return 0;
+
+ audio_device_buffer_.reset(new webrtc::AudioDeviceBuffer(task_queue_factory_.get()));
+ audio_device_.reset(new ios_adm::AudioDeviceIOS(bypass_voice_processing_));
+ RTC_CHECK(audio_device_);
+
+ this->AttachAudioBuffer();
+
+ AudioDeviceGeneric::InitStatus status = audio_device_->Init();
+ RTC_HISTOGRAM_ENUMERATION(
+ "WebRTC.Audio.InitializationResult", static_cast<int>(status),
+ static_cast<int>(AudioDeviceGeneric::InitStatus::NUM_STATUSES));
+ if (status != AudioDeviceGeneric::InitStatus::OK) {
+ RTC_LOG(LS_ERROR) << "Audio device initialization failed.";
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+ }
+
+ // Terminates the underlying device and clears initialized_. Idempotent.
+ // Note: audio_device_/audio_device_buffer_ are kept alive until the next
+ // Init() or destruction.
+ int32_t AudioDeviceModuleIOS::Terminate() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return 0;
+ if (audio_device_->Terminate() == -1) {
+ return -1;
+ }
+ initialized_ = false;
+ return 0;
+ }
+
+ bool AudioDeviceModuleIOS::Initialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << ": " << initialized_;
+ return initialized_;
+ }
+
+ // The wrappers below all follow the same pattern: guard on initialization,
+ // forward to audio_device_, and adapt reference out-params to the ADM's
+ // pointer out-params. On iOS most of the underlying calls are stubs (see
+ // AudioDeviceIOS "Not Implemented" section).
+
+ int32_t AudioDeviceModuleIOS::InitSpeaker() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->InitSpeaker();
+ }
+
+ int32_t AudioDeviceModuleIOS::InitMicrophone() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->InitMicrophone();
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerVolumeIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->SpeakerVolumeIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetSpeakerVolume(uint32_t volume) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ CHECKinitialized_();
+ return audio_device_->SetSpeakerVolume(volume);
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerVolume(uint32_t* volume) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint32_t level = 0;
+ if (audio_device_->SpeakerVolume(level) == -1) {
+ return -1;
+ }
+ *volume = level;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ bool AudioDeviceModuleIOS::SpeakerIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isInitialized = audio_device_->SpeakerIsInitialized();
+ RTC_DLOG(LS_INFO) << "output: " << isInitialized;
+ return isInitialized;
+ }
+
+ bool AudioDeviceModuleIOS::MicrophoneIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isInitialized = audio_device_->MicrophoneIsInitialized();
+ RTC_DLOG(LS_INFO) << "output: " << isInitialized;
+ return isInitialized;
+ }
+
+ int32_t AudioDeviceModuleIOS::MaxSpeakerVolume(uint32_t* maxVolume) const {
+ CHECKinitialized_();
+ uint32_t maxVol = 0;
+ if (audio_device_->MaxSpeakerVolume(maxVol) == -1) {
+ return -1;
+ }
+ *maxVolume = maxVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MinSpeakerVolume(uint32_t* minVolume) const {
+ CHECKinitialized_();
+ uint32_t minVol = 0;
+ if (audio_device_->MinSpeakerVolume(minVol) == -1) {
+ return -1;
+ }
+ *minVolume = minVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerMuteIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->SpeakerMuteIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetSpeakerMute(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ return audio_device_->SetSpeakerMute(enable);
+ }
+
+ int32_t AudioDeviceModuleIOS::SpeakerMute(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool muted = false;
+ if (audio_device_->SpeakerMute(muted) == -1) {
+ return -1;
+ }
+ *enabled = muted;
+ RTC_DLOG(LS_INFO) << "output: " << muted;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneMuteIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->MicrophoneMuteIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetMicrophoneMute(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ return (audio_device_->SetMicrophoneMute(enable));
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneMute(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool muted = false;
+ if (audio_device_->MicrophoneMute(muted) == -1) {
+ return -1;
+ }
+ *enabled = muted;
+ RTC_DLOG(LS_INFO) << "output: " << muted;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneVolumeIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->MicrophoneVolumeIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetMicrophoneVolume(uint32_t volume) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ CHECKinitialized_();
+ return (audio_device_->SetMicrophoneVolume(volume));
+ }
+
+ int32_t AudioDeviceModuleIOS::MicrophoneVolume(uint32_t* volume) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint32_t level = 0;
+ if (audio_device_->MicrophoneVolume(level) == -1) {
+ return -1;
+ }
+ *volume = level;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoRecordingIsAvailable(
+ bool* available) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->StereoRecordingIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ // Stereo recording is unsupported on iOS; always fails (-1), logging a
+ // warning only when the caller actually asked to enable it.
+ int32_t AudioDeviceModuleIOS::SetStereoRecording(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ if (enable) {
+ RTC_LOG(LS_WARNING) << "recording in stereo is not supported";
+ }
+ return -1;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoRecording(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool stereo = false;
+ if (audio_device_->StereoRecording(stereo) == -1) {
+ return -1;
+ }
+ *enabled = stereo;
+ RTC_DLOG(LS_INFO) << "output: " << stereo;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoPlayoutIsAvailable(bool* available) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->StereoPlayoutIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ // Attempts to change the playout channel count. Rejected while playout is
+ // initialized; on iOS audio_device_->SetStereoPlayout() returns nonzero
+ // (unsupported), so the channel update below is effectively unreachable.
+ int32_t AudioDeviceModuleIOS::SetStereoPlayout(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ if (audio_device_->PlayoutIsInitialized()) {
+ RTC_LOG(LS_ERROR) << "unable to set stereo mode while playing side is initialized";
+ return -1;
+ }
+ if (audio_device_->SetStereoPlayout(enable)) {
+ RTC_LOG(LS_WARNING) << "stereo playout is not supported";
+ return -1;
+ }
+ int8_t nChannels(1);
+ if (enable) {
+ nChannels = 2;
+ }
+ audio_device_buffer_.get()->SetPlayoutChannels(nChannels);
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::StereoPlayout(bool* enabled) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool stereo = false;
+ if (audio_device_->StereoPlayout(stereo) == -1) {
+ return -1;
+ }
+ *enabled = stereo;
+ RTC_DLOG(LS_INFO) << "output: " << stereo;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::PlayoutIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->PlayoutIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::RecordingIsAvailable(bool* available) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ bool isAvailable = false;
+ if (audio_device_->RecordingIsAvailable(isAvailable) == -1) {
+ return -1;
+ }
+ *available = isAvailable;
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MaxMicrophoneVolume(uint32_t* maxVolume) const {
+ CHECKinitialized_();
+ uint32_t maxVol(0);
+ if (audio_device_->MaxMicrophoneVolume(maxVol) == -1) {
+ return -1;
+ }
+ *maxVolume = maxVol;
+ return 0;
+ }
+
+ int32_t AudioDeviceModuleIOS::MinMicrophoneVolume(uint32_t* minVolume) const {
+ CHECKinitialized_();
+ uint32_t minVol(0);
+ if (audio_device_->MinMicrophoneVolume(minVol) == -1) {
+ return -1;
+ }
+ *minVolume = minVol;
+ return 0;
+ }
+
+ int16_t AudioDeviceModuleIOS::PlayoutDevices() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint16_t nPlayoutDevices = audio_device_->PlayoutDevices();
+ RTC_DLOG(LS_INFO) << "output: " << nPlayoutDevices;
+ return (int16_t)(nPlayoutDevices);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetPlayoutDevice(uint16_t index) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ CHECKinitialized_();
+ return audio_device_->SetPlayoutDevice(index);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetPlayoutDevice(WindowsDeviceType device) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->SetPlayoutDevice(device);
+ }
+
+ // Fetches the display name (and optionally GUID) of the playout device at
+ // `index`. `name` is required; `guid` may be NULL.
+ // NOTE(review): the `name != NULL` re-check below is redundant — the early
+ // return above already guarantees name is non-NULL at that point.
+ int32_t AudioDeviceModuleIOS::PlayoutDeviceName(
+ uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)";
+ CHECKinitialized_();
+ if (name == NULL) {
+ return -1;
+ }
+ if (audio_device_->PlayoutDeviceName(index, name, guid) == -1) {
+ return -1;
+ }
+ if (name != NULL) {
+ RTC_DLOG(LS_INFO) << "output: name = " << name;
+ }
+ if (guid != NULL) {
+ RTC_DLOG(LS_INFO) << "output: guid = " << guid;
+ }
+ return 0;
+ }
+
+ // Recording-side counterpart of PlayoutDeviceName(); same contract and the
+ // same redundant `name != NULL` re-check.
+ int32_t AudioDeviceModuleIOS::RecordingDeviceName(
+ uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)";
+ CHECKinitialized_();
+ if (name == NULL) {
+ return -1;
+ }
+ if (audio_device_->RecordingDeviceName(index, name, guid) == -1) {
+ return -1;
+ }
+ if (name != NULL) {
+ RTC_DLOG(LS_INFO) << "output: name = " << name;
+ }
+ if (guid != NULL) {
+ RTC_DLOG(LS_INFO) << "output: guid = " << guid;
+ }
+ return 0;
+ }
+
+ int16_t AudioDeviceModuleIOS::RecordingDevices() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ uint16_t nRecordingDevices = audio_device_->RecordingDevices();
+ RTC_DLOG(LS_INFO) << "output: " << nRecordingDevices;
+ return (int16_t)nRecordingDevices;
+ }
+
+ int32_t AudioDeviceModuleIOS::SetRecordingDevice(uint16_t index) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ CHECKinitialized_();
+ return audio_device_->SetRecordingDevice(index);
+ }
+
+ int32_t AudioDeviceModuleIOS::SetRecordingDevice(WindowsDeviceType device) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ return audio_device_->SetRecordingDevice(device);
+ }
+
+ // Transport control: Init*/Start* are idempotent (early-out when already
+ // initialized/active); Start*/Stop* also start/stop the AudioDeviceBuffer
+ // side and record success rates in UMA histograms.
+
+ int32_t AudioDeviceModuleIOS::InitPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (PlayoutIsInitialized()) {
+ return 0;
+ }
+ int32_t result = audio_device_->InitPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::InitRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (RecordingIsInitialized()) {
+ return 0;
+ }
+ int32_t result = audio_device_->InitRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::PlayoutIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->PlayoutIsInitialized();
+ }
+
+ bool AudioDeviceModuleIOS::RecordingIsInitialized() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->RecordingIsInitialized();
+ }
+
+ int32_t AudioDeviceModuleIOS::StartPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (Playing()) {
+ return 0;
+ }
+ // Buffer first, then device, so callbacks find the buffer running.
+ audio_device_buffer_.get()->StartPlayout();
+ int32_t result = audio_device_->StartPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::StopPlayout() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ // Device first, then buffer — mirror image of StartPlayout().
+ int32_t result = audio_device_->StopPlayout();
+ audio_device_buffer_.get()->StopPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::Playing() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->Playing();
+ }
+
+ int32_t AudioDeviceModuleIOS::StartRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ if (Recording()) {
+ return 0;
+ }
+ audio_device_buffer_.get()->StartRecording();
+ int32_t result = audio_device_->StartRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ int32_t AudioDeviceModuleIOS::StopRecording() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized_();
+ int32_t result = audio_device_->StopRecording();
+ audio_device_buffer_.get()->StopRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool AudioDeviceModuleIOS::Recording() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ return audio_device_->Recording();
+ }
+
+ // Registers the PCM transport callback directly on the buffer. Note: unlike
+ // most methods here, this is not gated on CHECKinitialized_(); it assumes
+ // audio_device_buffer_ exists (i.e. Init() has run) — confirm callers.
+ int32_t AudioDeviceModuleIOS::RegisterAudioCallback(
+ AudioTransport* audioCallback) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return audio_device_buffer_.get()->RegisterAudioCallback(audioCallback);
+ }
+
+ // Reports the current playout delay in milliseconds via out-parameter.
+ int32_t AudioDeviceModuleIOS::PlayoutDelay(uint16_t* delayMS) const {
+ CHECKinitialized_();
+ uint16_t delay = 0;
+ if (audio_device_->PlayoutDelay(delay) == -1) {
+ RTC_LOG(LS_ERROR) << "failed to retrieve the playout delay";
+ return -1;
+ }
+ *delayMS = delay;
+ return 0;
+ }
+
+ // Built-in audio effect (AEC/AGC/NS) availability and enablement are all
+ // forwarded to AudioDeviceIOS.
+
+ bool AudioDeviceModuleIOS::BuiltInAECIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInAECIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInAEC(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInAEC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ bool AudioDeviceModuleIOS::BuiltInAGCIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInAGCIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInAGC(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInAGC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ bool AudioDeviceModuleIOS::BuiltInNSIsAvailable() const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ CHECKinitialized__BOOL();
+ bool isAvailable = audio_device_->BuiltInNSIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ int32_t AudioDeviceModuleIOS::EnableBuiltInNS(bool enable) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ CHECKinitialized_();
+ int32_t ok = audio_device_->EnableBuiltInNS(enable);
+ RTC_DLOG(LS_INFO) << "output: " << ok;
+ return ok;
+ }
+
+ int32_t AudioDeviceModuleIOS::GetPlayoutUnderrunCount() const {
+ // Don't log here, as this method can be called very often.
+ CHECKinitialized_();
+ int32_t ok = audio_device_->GetPlayoutUnderrunCount();
+ return ok;
+ }
+
+#if defined(WEBRTC_IOS)
+ // Copies the device's current playout/record AudioParameters into `params`.
+ int AudioDeviceModuleIOS::GetPlayoutAudioParameters(
+ AudioParameters* params) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ int r = audio_device_->GetPlayoutAudioParameters(params);
+ RTC_DLOG(LS_INFO) << "output: " << r;
+ return r;
+ }
+
+ int AudioDeviceModuleIOS::GetRecordAudioParameters(
+ AudioParameters* params) const {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ int r = audio_device_->GetRecordAudioParameters(params);
+ RTC_DLOG(LS_INFO) << "output: " << r;
+ return r;
+ }
+#endif // WEBRTC_IOS
+}  // namespace ios_adm
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h
new file mode 100644
index 0000000000..f7c44c8184
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/audio_session_observer.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
+
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Observer interface for listening to AVAudioSession events.
+class AudioSessionObserver {
+ public:
+ // Called when audio session interruption begins.
+ virtual void OnInterruptionBegin() = 0;
+
+ // Called when audio session interruption ends.
+ virtual void OnInterruptionEnd() = 0;
+
+ // Called when audio route changes.
+ virtual void OnValidRouteChange() = 0;
+
+ // Called when the ability to play or record changes.
+ virtual void OnCanPlayOrRecordChange(bool can_play_or_record) = 0;
+
+ virtual void OnChangedOutputVolume() = 0;
+
+ protected:
+ virtual ~AudioSessionObserver() {}
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h
new file mode 100644
index 0000000000..ac86258a5e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
+
+#include <string>
+
+namespace webrtc {
+namespace ios {
+
+bool CheckAndLogError(BOOL success, NSError* error);
+
+NSString* NSStringFromStdString(const std::string& stdString);
+std::string StdStringFromNSString(NSString* nsString);
+
+// Return thread ID as a string.
+std::string GetThreadId();
+
+// Return thread ID as string suitable for debug logging.
+std::string GetThreadInfo();
+
+// Returns [NSThread currentThread] description as string.
+// Example: <NSThread: 0x170066d80>{number = 1, name = main}
+std::string GetCurrentThreadDescription();
+
+#if defined(WEBRTC_IOS)
+// Returns the current name of the operating system.
+std::string GetSystemName();
+
+// Returns the current version of the operating system as a string.
+std::string GetSystemVersionAsString();
+
+// Returns the version of the operating system in double representation.
+// Uses a cached value of the system version.
+double GetSystemVersion();
+
+// Returns the device type.
+// Examples: "iPhone" and "iPod touch".
+std::string GetDeviceType();
+#endif // defined(WEBRTC_IOS)
+
+// Returns a more detailed device name.
+// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
+std::string GetDeviceName();
+
+// Returns the name of the process. Does not uniquely identify the process.
+std::string GetProcessName();
+
+// Returns the identifier of the process (often called process ID).
+int GetProcessID();
+
+// Returns a string containing the version of the operating system on which the
+// process is executing. The string is human readable, localized, and
+// is appropriate for displaying to the user.
+std::string GetOSVersionString();
+
+// Returns the number of processing cores available on the device.
+int GetProcessorCount();
+
+#if defined(WEBRTC_IOS)
+// Indicates whether Low Power Mode is enabled on the iOS device.
+bool GetLowPowerModeEnabled();
+#endif
+
+} // namespace ios
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_HELPERS_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm
new file mode 100644
index 0000000000..cd0469656a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/helpers.mm
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <sys/sysctl.h>
+#if defined(WEBRTC_IOS)
+#import <UIKit/UIKit.h>
+#endif
+
+#include <memory>
+
+#include "helpers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace ios {
+
+NSString* NSStringFromStdString(const std::string& stdString) {
+ // std::string may contain null termination character so we construct
+ // using length.
+ return [[NSString alloc] initWithBytes:stdString.data()
+ length:stdString.length()
+ encoding:NSUTF8StringEncoding];
+}
+
+std::string StdStringFromNSString(NSString* nsString) {
+ NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
+ return std::string(reinterpret_cast<const char*>([charData bytes]),
+ [charData length]);
+}
+
+bool CheckAndLogError(BOOL success, NSError* error) {
+ if (!success) {
+ NSString* msg =
+ [NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
+ error.localizedDescription,
+ error.localizedFailureReason];
+ RTC_LOG(LS_ERROR) << StdStringFromNSString(msg);
+ return false;
+ }
+ return true;
+}
+
+// TODO(henrika): see if it is possible to move to GetThreadName in
+// platform_thread.h and base it on pthread methods instead.
+std::string GetCurrentThreadDescription() {
+ NSString* name = [NSString stringWithFormat:@"%@", [NSThread currentThread]];
+ return StdStringFromNSString(name);
+}
+
+#if defined(WEBRTC_IOS)
+std::string GetSystemName() {
+ NSString* osName = [[UIDevice currentDevice] systemName];
+ return StdStringFromNSString(osName);
+}
+
+std::string GetSystemVersionAsString() {
+ NSString* osVersion = [[UIDevice currentDevice] systemVersion];
+ return StdStringFromNSString(osVersion);
+}
+
+std::string GetDeviceType() {
+ NSString* deviceModel = [[UIDevice currentDevice] model];
+ return StdStringFromNSString(deviceModel);
+}
+
+bool GetLowPowerModeEnabled() {
+ return [NSProcessInfo processInfo].lowPowerModeEnabled;
+}
+#endif
+
+std::string GetDeviceName() {
+ size_t size;
+ sysctlbyname("hw.machine", NULL, &size, NULL, 0);
+ std::unique_ptr<char[]> machine;
+ machine.reset(new char[size]);
+ sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
+ return std::string(machine.get());
+}
+
+std::string GetProcessName() {
+ NSString* processName = [NSProcessInfo processInfo].processName;
+ return StdStringFromNSString(processName);
+}
+
+int GetProcessID() {
+ return [NSProcessInfo processInfo].processIdentifier;
+}
+
+std::string GetOSVersionString() {
+ NSString* osVersion =
+ [NSProcessInfo processInfo].operatingSystemVersionString;
+ return StdStringFromNSString(osVersion);
+}
+
+int GetProcessorCount() {
+ return [NSProcessInfo processInfo].processorCount;
+}
+
+} // namespace ios
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h
new file mode 100644
index 0000000000..ed9dd98568
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
+#define SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
+
+#include <AudioUnit/AudioUnit.h>
+
+namespace webrtc {
+namespace ios_adm {
+
+class VoiceProcessingAudioUnitObserver {
+ public:
+ // Callback function called on a real-time priority I/O thread from the audio
+ // unit. This method is used to signal that recorded audio is available.
+ virtual OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) = 0;
+
+ // Callback function called on a real-time priority I/O thread from the audio
+ // unit. This method is used to provide audio samples to the audio unit.
+ virtual OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* io_action_flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) = 0;
+
+ protected:
+ ~VoiceProcessingAudioUnitObserver() {}
+};
+
+// Convenience class to abstract away the management of a Voice Processing
+// I/O Audio Unit. The Voice Processing I/O unit has the same characteristics
+// as the Remote I/O unit (supports full duplex low-latency audio input and
+// output) and adds AEC for two-way duplex communication. It also adds AGC,
+// adjustment of voice-processing quality, and muting. Hence, ideal for
+// VoIP applications.
+class VoiceProcessingAudioUnit {
+ public:
+ VoiceProcessingAudioUnit(bool bypass_voice_processing,
+ VoiceProcessingAudioUnitObserver* observer);
+ ~VoiceProcessingAudioUnit();
+
+ // TODO(tkchin): enum for state and state checking.
+ enum State : int32_t {
+ // Init() should be called.
+ kInitRequired,
+ // Audio unit created but not initialized.
+ kUninitialized,
+ // Initialized but not started. Equivalent to stopped.
+ kInitialized,
+ // Initialized and started.
+ kStarted,
+ };
+
+ // Number of bytes per audio sample for 16-bit signed integer representation.
+ static const UInt32 kBytesPerSample;
+
+ // Initializes this class by creating the underlying audio unit instance.
+ // Creates a Voice-Processing I/O unit and configures it for full-duplex
+ // audio. The selected stream format is selected to avoid internal resampling
+ // and to match the 10ms callback rate for WebRTC as well as possible.
+// Does not initialize the audio unit.
+ bool Init();
+
+ VoiceProcessingAudioUnit::State GetState() const;
+
+ // Initializes the underlying audio unit with the given sample rate.
+ bool Initialize(Float64 sample_rate);
+
+ // Starts the underlying audio unit.
+ OSStatus Start();
+
+ // Stops the underlying audio unit.
+ bool Stop();
+
+ // Uninitializes the underlying audio unit.
+ bool Uninitialize();
+
+ // Calls render on the underlying audio unit.
+ OSStatus Render(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 output_bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ private:
+ // The C API used to set callbacks requires static functions. When these are
+ // called, they will invoke the relevant instance method by casting
+ // in_ref_con to VoiceProcessingAudioUnit*.
+ static OSStatus OnGetPlayoutData(void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+ static OSStatus OnDeliverRecordedData(void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ // Notifies observer that samples are needed for playback.
+ OSStatus NotifyGetPlayoutData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+ // Notifies observer that recorded samples are available for render.
+ OSStatus NotifyDeliverRecordedData(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data);
+
+ // Returns the predetermined format with a specific sample rate. See
+ // implementation file for details on format.
+ AudioStreamBasicDescription GetFormat(Float64 sample_rate) const;
+
+ // Deletes the underlying audio unit.
+ void DisposeAudioUnit();
+
+ const bool bypass_voice_processing_;
+ VoiceProcessingAudioUnitObserver* observer_;
+ AudioUnit vpio_unit_;
+ VoiceProcessingAudioUnit::State state_;
+};
+} // namespace ios_adm
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_AUDIO_VOICE_PROCESSING_AUDIO_UNIT_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm
new file mode 100644
index 0000000000..3905b6857a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/audio/voice_processing_audio_unit.mm
@@ -0,0 +1,488 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "voice_processing_audio_unit.h"
+
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/metrics.h"
+
+#import "base/RTCLogging.h"
+#import "sdk/objc/components/audio/RTCAudioSessionConfiguration.h"
+
+#if !defined(NDEBUG)
+static void LogStreamDescription(AudioStreamBasicDescription description) {
+ char formatIdString[5];
+ UInt32 formatId = CFSwapInt32HostToBig(description.mFormatID);
+ bcopy(&formatId, formatIdString, 4);
+ formatIdString[4] = '\0';
+ RTCLog(@"AudioStreamBasicDescription: {\n"
+ " mSampleRate: %.2f\n"
+ " formatIDString: %s\n"
+ " mFormatFlags: 0x%X\n"
+ " mBytesPerPacket: %u\n"
+ " mFramesPerPacket: %u\n"
+ " mBytesPerFrame: %u\n"
+ " mChannelsPerFrame: %u\n"
+ " mBitsPerChannel: %u\n"
+ " mReserved: %u\n}",
+ description.mSampleRate, formatIdString,
+ static_cast<unsigned int>(description.mFormatFlags),
+ static_cast<unsigned int>(description.mBytesPerPacket),
+ static_cast<unsigned int>(description.mFramesPerPacket),
+ static_cast<unsigned int>(description.mBytesPerFrame),
+ static_cast<unsigned int>(description.mChannelsPerFrame),
+ static_cast<unsigned int>(description.mBitsPerChannel),
+ static_cast<unsigned int>(description.mReserved));
+}
+#endif
+
+namespace webrtc {
+namespace ios_adm {
+
+// Calls to AudioUnitInitialize() can fail if called back-to-back on different
+// ADM instances. A fall-back solution is to allow multiple sequential calls
+// with as small delay between each. This factor sets the max number of allowed
+// initialization attempts.
+static const int kMaxNumberOfAudioUnitInitializeAttempts = 5;
+// A VP I/O unit's bus 1 connects to input hardware (microphone).
+static const AudioUnitElement kInputBus = 1;
+// A VP I/O unit's bus 0 connects to output hardware (speaker).
+static const AudioUnitElement kOutputBus = 0;
+
+// Returns the automatic gain control (AGC) state on the processed microphone
+// signal. Should be on by default for Voice Processing audio units.
+static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
+ RTC_DCHECK(audio_unit);
+ UInt32 size = sizeof(*enabled);
+ OSStatus result = AudioUnitGetProperty(audio_unit,
+ kAUVoiceIOProperty_VoiceProcessingEnableAGC,
+ kAudioUnitScope_Global,
+ kInputBus,
+ enabled,
+ &size);
+ RTCLog(@"VPIO unit AGC: %u", static_cast<unsigned int>(*enabled));
+ return result;
+}
+
+VoiceProcessingAudioUnit::VoiceProcessingAudioUnit(bool bypass_voice_processing,
+ VoiceProcessingAudioUnitObserver* observer)
+ : bypass_voice_processing_(bypass_voice_processing),
+ observer_(observer),
+ vpio_unit_(nullptr),
+ state_(kInitRequired) {
+ RTC_DCHECK(observer);
+}
+
+VoiceProcessingAudioUnit::~VoiceProcessingAudioUnit() {
+ DisposeAudioUnit();
+}
+
+const UInt32 VoiceProcessingAudioUnit::kBytesPerSample = 2;
+
+bool VoiceProcessingAudioUnit::Init() {
+ RTC_DCHECK_EQ(state_, kInitRequired);
+
+ // Create an audio component description to identify the Voice Processing
+ // I/O audio unit.
+ AudioComponentDescription vpio_unit_description;
+ vpio_unit_description.componentType = kAudioUnitType_Output;
+ vpio_unit_description.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
+ vpio_unit_description.componentManufacturer = kAudioUnitManufacturer_Apple;
+ vpio_unit_description.componentFlags = 0;
+ vpio_unit_description.componentFlagsMask = 0;
+
+ // Obtain an audio unit instance given the description.
+ AudioComponent found_vpio_unit_ref =
+ AudioComponentFindNext(nullptr, &vpio_unit_description);
+
+ // Create a Voice Processing IO audio unit.
+ OSStatus result = noErr;
+ result = AudioComponentInstanceNew(found_vpio_unit_ref, &vpio_unit_);
+ if (result != noErr) {
+ vpio_unit_ = nullptr;
+ RTCLogError(@"AudioComponentInstanceNew failed. Error=%ld.", (long)result);
+ return false;
+ }
+
+ // Enable input on the input scope of the input element.
+ UInt32 enable_input = 1;
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input, kInputBus, &enable_input,
+ sizeof(enable_input));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to enable input on input scope of input element. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Enable output on the output scope of the output element.
+ UInt32 enable_output = 1;
+ result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Output, kOutputBus,
+ &enable_output, sizeof(enable_output));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to enable output on output scope of output element. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Specify the callback function that provides audio samples to the audio
+ // unit.
+ AURenderCallbackStruct render_callback;
+ render_callback.inputProc = OnGetPlayoutData;
+ render_callback.inputProcRefCon = this;
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input,
+ kOutputBus, &render_callback, sizeof(render_callback));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to specify the render callback on the output bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Disable AU buffer allocation for the recorder, we allocate our own.
+ // TODO(henrika): not sure that it actually saves resource to make this call.
+ UInt32 flag = 0;
+ result = AudioUnitSetProperty(
+ vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
+ kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to disable buffer allocation on the input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Specify the callback to be called by the I/O thread to us when input audio
+ // is available. The recorded samples can then be obtained by calling the
+ // AudioUnitRender() method.
+ AURenderCallbackStruct input_callback;
+ input_callback.inputProc = OnDeliverRecordedData;
+ input_callback.inputProcRefCon = this;
+ result = AudioUnitSetProperty(vpio_unit_,
+ kAudioOutputUnitProperty_SetInputCallback,
+ kAudioUnitScope_Global, kInputBus,
+ &input_callback, sizeof(input_callback));
+ if (result != noErr) {
+ DisposeAudioUnit();
+ RTCLogError(@"Failed to specify the input callback on the input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ state_ = kUninitialized;
+ return true;
+}
+
+VoiceProcessingAudioUnit::State VoiceProcessingAudioUnit::GetState() const {
+ return state_;
+}
+
+bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate);
+
+ OSStatus result = noErr;
+ AudioStreamBasicDescription format = GetFormat(sample_rate);
+ UInt32 size = sizeof(format);
+#if !defined(NDEBUG)
+ LogStreamDescription(format);
+#endif
+
+ // Set the format on the output scope of the input element/bus.
+ result =
+ AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output, kInputBus, &format, size);
+ if (result != noErr) {
+ RTCLogError(@"Failed to set format on output scope of input bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Set the format on the input scope of the output element/bus.
+ result =
+ AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input, kOutputBus, &format, size);
+ if (result != noErr) {
+ RTCLogError(@"Failed to set format on input scope of output bus. "
+ "Error=%ld.",
+ (long)result);
+ return false;
+ }
+
+ // Initialize the Voice Processing I/O unit instance.
+ // Calls to AudioUnitInitialize() can fail if called back-to-back on
+ // different ADM instances. The error message in this case is -66635 which is
+ // undocumented. Tests have shown that calling AudioUnitInitialize a second
+ // time, after a short sleep, avoids this issue.
+ // See webrtc:5166 for details.
+ int failed_initalize_attempts = 0;
+ result = AudioUnitInitialize(vpio_unit_);
+ while (result != noErr) {
+ RTCLogError(@"Failed to initialize the Voice Processing I/O unit. "
+ "Error=%ld.",
+ (long)result);
+ ++failed_initalize_attempts;
+ if (failed_initalize_attempts == kMaxNumberOfAudioUnitInitializeAttempts) {
+ // Max number of initialization attempts exceeded, hence abort.
+ RTCLogError(@"Too many initialization attempts.");
+ return false;
+ }
+ RTCLog(@"Pause 100ms and try audio unit initialization again...");
+ [NSThread sleepForTimeInterval:0.1f];
+ result = AudioUnitInitialize(vpio_unit_);
+ }
+ if (result == noErr) {
+ RTCLog(@"Voice Processing I/O unit is now initialized.");
+ }
+
+ if (bypass_voice_processing_) {
+ // Attempt to disable builtin voice processing.
+ UInt32 toggle = 1;
+ result = AudioUnitSetProperty(vpio_unit_,
+ kAUVoiceIOProperty_BypassVoiceProcessing,
+ kAudioUnitScope_Global,
+ kInputBus,
+ &toggle,
+ sizeof(toggle));
+ if (result == noErr) {
+ RTCLog(@"Successfully bypassed voice processing.");
+ } else {
+ RTCLogError(@"Failed to bypass voice processing. Error=%ld.", (long)result);
+ }
+ state_ = kInitialized;
+ return true;
+ }
+
+ // AGC should be enabled by default for Voice Processing I/O units but it is
+ // checked below and enabled explicitly if needed. This scheme is used
+ // to be absolutely sure that the AGC is enabled since we have seen cases
+ // where only zeros are recorded and a disabled AGC could be one of the
+ // reasons why it happens.
+ int agc_was_enabled_by_default = 0;
+ UInt32 agc_is_enabled = 0;
+ result = GetAGCState(vpio_unit_, &agc_is_enabled);
+ if (result != noErr) {
+ RTCLogError(@"Failed to get AGC state (1st attempt). "
+ "Error=%ld.",
+ (long)result);
+ // Example of error code: kAudioUnitErr_NoConnection (-10876).
+ // All error codes related to audio units are negative and are therefore
+    // converted into a positive value to match the UMA APIs.
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.GetAGCStateErrorCode1", (-1) * result);
+ } else if (agc_is_enabled) {
+ // Remember that the AGC was enabled by default. Will be used in UMA.
+ agc_was_enabled_by_default = 1;
+ } else {
+ // AGC was initially disabled => try to enable it explicitly.
+ UInt32 enable_agc = 1;
+ result =
+ AudioUnitSetProperty(vpio_unit_,
+ kAUVoiceIOProperty_VoiceProcessingEnableAGC,
+ kAudioUnitScope_Global, kInputBus, &enable_agc,
+ sizeof(enable_agc));
+ if (result != noErr) {
+ RTCLogError(@"Failed to enable the built-in AGC. "
+ "Error=%ld.",
+ (long)result);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.SetAGCStateErrorCode", (-1) * result);
+ }
+ result = GetAGCState(vpio_unit_, &agc_is_enabled);
+ if (result != noErr) {
+ RTCLogError(@"Failed to get AGC state (2nd attempt). "
+ "Error=%ld.",
+ (long)result);
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ "WebRTC.Audio.GetAGCStateErrorCode2", (-1) * result);
+ }
+ }
+
+ // Track if the built-in AGC was enabled by default (as it should) or not.
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.BuiltInAGCWasEnabledByDefault",
+ agc_was_enabled_by_default);
+ RTCLog(@"WebRTC.Audio.BuiltInAGCWasEnabledByDefault: %d",
+ agc_was_enabled_by_default);
+ // As a final step, add an UMA histogram for tracking the AGC state.
+ // At this stage, the AGC should be enabled, and if it is not, more work is
+ // needed to find out the root cause.
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.BuiltInAGCIsEnabled", agc_is_enabled);
+ RTCLog(@"WebRTC.Audio.BuiltInAGCIsEnabled: %u",
+ static_cast<unsigned int>(agc_is_enabled));
+
+ state_ = kInitialized;
+ return true;
+}
+
+OSStatus VoiceProcessingAudioUnit::Start() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Starting audio unit.");
+
+ OSStatus result = AudioOutputUnitStart(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to start audio unit. Error=%ld", (long)result);
+ return result;
+ } else {
+ RTCLog(@"Started audio unit");
+ }
+ state_ = kStarted;
+ return noErr;
+}
+
+bool VoiceProcessingAudioUnit::Stop() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Stopping audio unit.");
+
+ OSStatus result = AudioOutputUnitStop(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to stop audio unit. Error=%ld", (long)result);
+ return false;
+ } else {
+ RTCLog(@"Stopped audio unit");
+ }
+
+ state_ = kInitialized;
+ return true;
+}
+
+bool VoiceProcessingAudioUnit::Uninitialize() {
+ RTC_DCHECK_GE(state_, kUninitialized);
+ RTCLog(@"Unintializing audio unit.");
+
+ OSStatus result = AudioUnitUninitialize(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"Failed to uninitialize audio unit. Error=%ld", (long)result);
+ return false;
+ } else {
+ RTCLog(@"Uninitialized audio unit.");
+ }
+
+ state_ = kUninitialized;
+ return true;
+}
+
+OSStatus VoiceProcessingAudioUnit::Render(AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 output_bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ RTC_DCHECK(vpio_unit_) << "Init() not called.";
+
+ OSStatus result = AudioUnitRender(vpio_unit_, flags, time_stamp,
+ output_bus_number, num_frames, io_data);
+ if (result != noErr) {
+ RTCLogError(@"Failed to render audio unit. Error=%ld", (long)result);
+ }
+ return result;
+}
+
+OSStatus VoiceProcessingAudioUnit::OnGetPlayoutData(
+ void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ VoiceProcessingAudioUnit* audio_unit =
+ static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
+ return audio_unit->NotifyGetPlayoutData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData(
+ void* in_ref_con,
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ VoiceProcessingAudioUnit* audio_unit =
+ static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
+ return audio_unit->NotifyDeliverRecordedData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData(
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ return observer_->OnGetPlayoutData(flags, time_stamp, bus_number, num_frames,
+ io_data);
+}
+
+OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData(
+ AudioUnitRenderActionFlags* flags,
+ const AudioTimeStamp* time_stamp,
+ UInt32 bus_number,
+ UInt32 num_frames,
+ AudioBufferList* io_data) {
+ return observer_->OnDeliverRecordedData(flags, time_stamp, bus_number,
+ num_frames, io_data);
+}
+
+AudioStreamBasicDescription VoiceProcessingAudioUnit::GetFormat(
+ Float64 sample_rate) const {
+ // Set the application formats for input and output:
+ // - use same format in both directions
+ // - avoid resampling in the I/O unit by using the hardware sample rate
+ // - linear PCM => noncompressed audio data format with one frame per packet
+ // - no need to specify interleaving since only mono is supported
+ AudioStreamBasicDescription format;
+ RTC_DCHECK_EQ(1, kRTCAudioSessionPreferredNumberOfChannels);
+ format.mSampleRate = sample_rate;
+ format.mFormatID = kAudioFormatLinearPCM;
+ format.mFormatFlags =
+ kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+ format.mBytesPerPacket = kBytesPerSample;
+ format.mFramesPerPacket = 1; // uncompressed.
+ format.mBytesPerFrame = kBytesPerSample;
+ format.mChannelsPerFrame = kRTCAudioSessionPreferredNumberOfChannels;
+ format.mBitsPerChannel = 8 * kBytesPerSample;
+ return format;
+}
+
+void VoiceProcessingAudioUnit::DisposeAudioUnit() {
+ if (vpio_unit_) {
+ switch (state_) {
+ case kStarted:
+ Stop();
+ [[fallthrough]];
+ case kInitialized:
+ Uninitialize();
+ break;
+ case kUninitialized:
+ case kInitRequired:
+ break;
+ }
+
+ RTCLog(@"Disposing audio unit.");
+ OSStatus result = AudioComponentInstanceDispose(vpio_unit_);
+ if (result != noErr) {
+ RTCLogError(@"AudioComponentInstanceDispose failed. Error=%ld.",
+ (long)result);
+ }
+ vpio_unit_ = nullptr;
+ }
+}
+
+} // namespace ios_adm
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h b/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h
new file mode 100644
index 0000000000..7c411a1db1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/network_monitor_observer.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
+#define SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Observer interface for listening to NWPathMonitor updates.
+class NetworkMonitorObserver {
+ public:
+ // Called when a path update occurs, on network monitor dispatch queue.
+ //
+ // `adapter_type_by_name` is a map from interface name (i.e. "pdp_ip0") to
+ // adapter type, for all available interfaces on the current path. If an
+ // interface name isn't present it can be assumed to be unavailable.
+ virtual void OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name) = 0;
+
+ protected:
+ virtual ~NetworkMonitorObserver() {}
+};
+
+} // namespace webrtc
+
+#endif  // SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h
new file mode 100644
index 0000000000..9c1ff17876
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
+
+#import <CoreVideo/CoreVideo.h>
+
+#import "base/RTCMacros.h"
+
+#include "common_video/include/video_frame_buffer.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
+
+namespace webrtc {
+
+class ObjCFrameBuffer : public VideoFrameBuffer {
+ public:
+ explicit ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>);
+ ~ObjCFrameBuffer() override;
+
+ Type type() const override;
+
+ int width() const override;
+ int height() const override;
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> wrapped_frame_buffer() const;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer_;
+ int width_;
+ int height_;
+};
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_FRAME_BUFFER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm
new file mode 100644
index 0000000000..566733d692
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_frame_buffer.mm
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+#include "api/make_ref_counted.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+
+namespace webrtc {
+
+namespace {
+
+/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */
+class ObjCI420FrameBuffer : public I420BufferInterface {
+ public:
+ explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
+ : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+ ~ObjCI420FrameBuffer() override {}
+
+ int width() const override { return width_; }
+
+ int height() const override { return height_; }
+
+ const uint8_t* DataY() const override { return frame_buffer_.dataY; }
+
+ const uint8_t* DataU() const override { return frame_buffer_.dataU; }
+
+ const uint8_t* DataV() const override { return frame_buffer_.dataV; }
+
+ int StrideY() const override { return frame_buffer_.strideY; }
+
+ int StrideU() const override { return frame_buffer_.strideU; }
+
+ int StrideV() const override { return frame_buffer_.strideV; }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer_;
+ int width_;
+ int height_;
+};
+
+} // namespace
+
+ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
+ : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+
+ObjCFrameBuffer::~ObjCFrameBuffer() {}
+
+VideoFrameBuffer::Type ObjCFrameBuffer::type() const {
+ return Type::kNative;
+}
+
+int ObjCFrameBuffer::width() const {
+ return width_;
+}
+
+int ObjCFrameBuffer::height() const {
+ return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
+ return rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer() const {
+ return frame_buffer_;
+}
+
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+ if (buffer->type() == VideoFrameBuffer::Type::kNative) {
+ return static_cast<ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
+ } else {
+ return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()];
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h
new file mode 100644
index 0000000000..709e9dfbe5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
+
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/field_trials_view.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/network_monitor.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/objc/components/network/RTCNetworkMonitor+Private.h"
+#include "sdk/objc/native/src/network_monitor_observer.h"
+
+namespace webrtc {
+
+class ObjCNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+ ObjCNetworkMonitorFactory() = default;
+ ~ObjCNetworkMonitorFactory() override = default;
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) override;
+};
+
+class ObjCNetworkMonitor : public rtc::NetworkMonitorInterface,
+ public NetworkMonitorObserver {
+ public:
+ ObjCNetworkMonitor();
+ ~ObjCNetworkMonitor() override;
+
+ void Start() override;
+ void Stop() override;
+
+ InterfaceInfo GetInterfaceInfo(absl::string_view interface_name) override;
+
+ // NetworkMonitorObserver override.
+ // Fans out updates to observers on the correct thread.
+ void OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name) override;
+
+ private:
+ rtc::Thread* thread_ = nullptr;
+ bool started_ = false;
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
+ adapter_type_by_name_ RTC_GUARDED_BY(thread_);
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_;
+ RTCNetworkMonitor* network_monitor_ = nil;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm
new file mode 100644
index 0000000000..535548c64c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_network_monitor.mm
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_network_monitor.h"
+#include "absl/strings/string_view.h"
+
+#include <algorithm>
+
+#include "rtc_base/logging.h"
+#include "rtc_base/string_utils.h"
+
+namespace webrtc {
+
+rtc::NetworkMonitorInterface* ObjCNetworkMonitorFactory::CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) {
+ return new ObjCNetworkMonitor();
+}
+
+ObjCNetworkMonitor::ObjCNetworkMonitor() {
+ safety_flag_ = PendingTaskSafetyFlag::Create();
+}
+
+ObjCNetworkMonitor::~ObjCNetworkMonitor() {
+ [network_monitor_ stop];
+ network_monitor_ = nil;
+}
+
+void ObjCNetworkMonitor::Start() {
+ if (started_) {
+ return;
+ }
+ thread_ = rtc::Thread::Current();
+ RTC_DCHECK_RUN_ON(thread_);
+ safety_flag_->SetAlive();
+ network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this];
+ if (network_monitor_ == nil) {
+ RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?";
+ }
+ started_ = true;
+}
+
+void ObjCNetworkMonitor::Stop() {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (!started_) {
+ return;
+ }
+ safety_flag_->SetNotAlive();
+ [network_monitor_ stop];
+ network_monitor_ = nil;
+ started_ = false;
+}
+
+rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo(
+ absl::string_view interface_name) {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (adapter_type_by_name_.empty()) {
+ // If we have no path update, assume everything's available, because it's
+ // preferable for WebRTC to try all interfaces rather than none at all.
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = true,
+ };
+ }
+ auto iter = adapter_type_by_name_.find(interface_name);
+ if (iter == adapter_type_by_name_.end()) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = false,
+ };
+ }
+
+ return {
+ .adapter_type = iter->second,
+ .available = true,
+ };
+}
+
+void ObjCNetworkMonitor::OnPathUpdate(
+ std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> adapter_type_by_name) {
+ RTC_DCHECK(network_monitor_ != nil);
+ thread_->PostTask(SafeTask(safety_flag_, [this, adapter_type_by_name] {
+ RTC_DCHECK_RUN_ON(thread_);
+ adapter_type_by_name_ = adapter_type_by_name;
+ InvokeNetworksChangedCallback();
+ }));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h
new file mode 100644
index 0000000000..30ad8c2a4b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
+
+#import "base/RTCMacros.h"
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "media/base/codec.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
+
+namespace webrtc {
+
+class ObjCVideoDecoderFactory : public VideoDecoderFactory {
+ public:
+ explicit ObjCVideoDecoderFactory(id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>);
+ ~ObjCVideoDecoderFactory() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> wrapped_decoder_factory() const;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm
new file mode 100644
index 0000000000..da3b302275
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+#import "base/RTCVideoDecoderFactory.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_codec/RTCCodecSpecificInfoH264.h"
+#import "sdk/objc/api/peerconnection/RTCEncodedImage+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h"
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+namespace {
+class ObjCVideoDecoder : public VideoDecoder {
+ public:
+ ObjCVideoDecoder(id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder)
+ : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {}
+
+ bool Configure(const Settings &settings) override {
+ return
+ [decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] == WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Decode(const EncodedImage &input_image,
+ bool missing_frames,
+ int64_t render_time_ms = -1) override {
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image];
+
+ return [decoder_ decode:encodedImage
+ missingFrames:missing_frames
+ codecSpecificInfo:nil
+ renderTimeMs:render_time_ms];
+ }
+
+ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
+ [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
+ const auto buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+ VideoFrame videoFrame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_timestamp_rtp((uint32_t)(frame.timeStampNs / rtc::kNumNanosecsPerMicrosec))
+ .set_timestamp_ms(0)
+ .set_rotation((VideoRotation)frame.rotation)
+ .build();
+ videoFrame.set_timestamp(frame.timeStamp);
+
+ callback->Decoded(videoFrame);
+ }];
+
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return [decoder_ releaseDecoder]; }
+
+ const char *ImplementationName() const override { return implementation_name_.c_str(); }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder_;
+ const std::string implementation_name_;
+};
+} // namespace
+
+ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory)
+ : decoder_factory_(decoder_factory) {}
+
+ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
+ return decoder_factory_;
+}
+
+std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::CreateVideoDecoder(
+ const SdpVideoFormat &format) {
+ NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()];
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) {
+ if ([codecName isEqualToString:codecInfo.name]) {
+ id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoder_factory_ createDecoder:codecInfo];
+
+ if ([decoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) class]]) {
+ return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) *)decoder releaseWrappedDecoder];
+ } else {
+ return std::unique_ptr<ObjCVideoDecoder>(new ObjCVideoDecoder(decoder));
+ }
+ }
+ }
+
+ return nullptr;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats() const {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+
+ return supported_formats;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h
new file mode 100644
index 0000000000..38db5e6ae7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+
+#include "api/video_codecs/video_encoder_factory.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
+
+namespace webrtc {
+
+class ObjCVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ explicit ObjCVideoEncoderFactory(id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>);
+ ~ObjCVideoEncoderFactory() override;
+
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> wrapped_encoder_factory() const;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::vector<SdpVideoFormat> GetImplementations() const override;
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+ std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm
new file mode 100644
index 0000000000..d4ea79cc88
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#include <string>
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "components/video_codec/RTCCodecSpecificInfoH264+Private.h"
+#import "sdk/objc/api/peerconnection/RTCEncodedImage+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h"
+#import "sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h"
+#import "sdk/objc/api/video_codec/RTCVideoCodecConstants.h"
+#import "sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h"
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/logging.h"
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+namespace {
+
+class ObjCVideoEncoder : public VideoEncoder {
+ public:
+ ObjCVideoEncoder(id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder)
+ : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {}
+
+ int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override {
+ RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
+ [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings];
+ return [encoder_ startEncodeWithSettings:settings
+ numberOfCores:encoder_settings.number_of_cores];
+ }
+
+ int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
+ if (callback) {
+ [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
+ id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
+ EncodedImage encodedImage = [frame nativeEncodedImage];
+
+ // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
+ CodecSpecificInfo codecSpecificInfo;
+ if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
+ codecSpecificInfo =
+ [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
+ }
+
+ EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
+ return res.error == EncodedImageCallback::Result::OK;
+ }];
+ } else {
+ [encoder_ setCallback:nil];
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return [encoder_ releaseEncoder]; }
+
+ int32_t Encode(const VideoFrame &frame,
+ const std::vector<VideoFrameType> *frame_types) override {
+ NSMutableArray<NSNumber *> *rtcFrameTypes = [NSMutableArray array];
+ for (size_t i = 0; i < frame_types->size(); ++i) {
+ [rtcFrameTypes addObject:@(RTCFrameType(frame_types->at(i)))];
+ }
+
+ return [encoder_ encode:ToObjCVideoFrame(frame)
+ codecSpecificInfo:nil
+ frameTypes:rtcFrameTypes];
+ }
+
+ void SetRates(const RateControlParameters &parameters) override {
+ const uint32_t bitrate = parameters.bitrate.get_sum_kbps();
+ const uint32_t framerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
+ [encoder_ setBitrate:bitrate framerate:framerate];
+ }
+
+ VideoEncoder::EncoderInfo GetEncoderInfo() const override {
+ EncoderInfo info;
+ info.implementation_name = implementation_name_;
+
+ RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings];
+ info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
+ ScalingSettings::kOff;
+
+ info.requested_resolution_alignment = encoder_.resolutionAlignment > 0 ?: 1;
+ info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
+ info.supports_native_handle = encoder_.supportsNativeHandle;
+ info.is_hardware_accelerated = true;
+ return info;
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder_;
+ const std::string implementation_name_;
+};
+
+class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface {
+ public:
+ ObjcVideoEncoderSelector(id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
+ selector_ = selector;
+ }
+ void OnCurrentEncoder(const SdpVideoFormat &format) override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+ [selector_ registerCurrentEncoderInfo:info];
+ }
+ absl::optional<SdpVideoFormat> OnEncoderBroken() override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ return absl::nullopt;
+ }
+ absl::optional<SdpVideoFormat> OnAvailableBitrate(const DataRate &rate) override {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ return absl::nullopt;
+ }
+
+ absl::optional<SdpVideoFormat> OnResolutionChange(const RenderResolution &resolution) override {
+ if ([selector_ respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_
+ encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(), resolution.Height())];
+ if (info) {
+ return [info nativeSdpVideoFormat];
+ }
+ }
+ return absl::nullopt;
+ }
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector_;
+};
+
+} // namespace
+
+ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory)
+ : encoder_factory_(encoder_factory) {}
+
+ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {}
+
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
+ return encoder_factory_;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+
+ return supported_formats;
+}
+
+std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const {
+ if ([encoder_factory_ respondsToSelector:@selector(implementations)]) {
+ std::vector<SdpVideoFormat> supported_formats;
+ for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) {
+ SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
+ supported_formats.push_back(format);
+ }
+ return supported_formats;
+ }
+ return GetSupportedFormats();
+}
+
+std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::CreateVideoEncoder(
+ const SdpVideoFormat &format) {
+ RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+ id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [encoder_factory_ createEncoder:info];
+ if ([encoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) class]]) {
+ return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) *)encoder releaseWrappedEncoder];
+ } else {
+ return std::unique_ptr<ObjCVideoEncoder>(new ObjCVideoEncoder(encoder));
+ }
+}
+
+std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+ ObjCVideoEncoderFactory::GetEncoderSelector() const {
+ if ([encoder_factory_ respondsToSelector:@selector(encoderSelector)]) {
+ id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector = [encoder_factory_ encoderSelector];
+ if (selector) {
+ return absl::make_unique<ObjcVideoEncoderSelector>(selector);
+ }
+ }
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h
new file mode 100644
index 0000000000..c2931cb2f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
+
+#import "base/RTCVideoFrame.h"
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame);
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm
new file mode 100644
index 0000000000..2e8ce6153e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_frame.mm
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+#include "rtc_base/time_utils.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+namespace webrtc {
+
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) {
+ RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+ initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
+ rotation:RTCVideoRotation(frame.rotation())
+ timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
+ videoFrame.timeStamp = frame.timestamp();
+
+ return videoFrame;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h
new file mode 100644
index 0000000000..f9c35eae96
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
+#define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
+
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+#import "base/RTCMacros.h"
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+
+namespace webrtc {
+
+class ObjCVideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer);
+ void OnFrame(const VideoFrame& nativeVideoFrame) override;
+
+ private:
+ id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer_;
+ CGSize size_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_RENDERER_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm
new file mode 100644
index 0000000000..4a9b647ec3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_renderer.mm
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_renderer.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoRenderer.h"
+
+#include "sdk/objc/native/src/objc_video_frame.h"
+
+namespace webrtc {
+
+ObjCVideoRenderer::ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
+ : renderer_(renderer), size_(CGSizeZero) {}
+
+void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) {
+ RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
+
+ CGSize current_size = (videoFrame.rotation % 180 == 0) ?
+ CGSizeMake(videoFrame.width, videoFrame.height) :
+ CGSizeMake(videoFrame.height, videoFrame.width);
+
+ if (!CGSizeEqualToSize(size_, current_size)) {
+ size_ = current_size;
+ [renderer_ setSize:size_];
+ }
+ [renderer_ renderFrame:videoFrame];
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h
new file mode 100644
index 0000000000..19a3d6db43
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
+#define SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
+
+#import "base/RTCVideoCapturer.h"
+
+#include "base/RTCMacros.h"
+#include "media/base/adapted_video_track_source.h"
+#include "rtc_base/timestamp_aligner.h"
+
+RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame));
+
+// Bridges the Objective-C capturer-delegate callback into a C++
+// ObjCVideoTrackSource (the back-pointer lives in a class extension in the
+// corresponding .mm file).
+@interface RTCObjCVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
+@end
+
+namespace webrtc {
+
+// C++ video track source fed by Objective-C capturers. Frames arrive via
+// OnCapturedFrame() and are scaled/cropped by the inherited
+// AdaptedVideoTrackSource machinery before being delivered to sinks.
+class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
+ public:
+  ObjCVideoTrackSource();
+  explicit ObjCVideoTrackSource(bool is_screencast);
+  explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter);
+
+  // Whether this source carries screen-share content.
+  bool is_screencast() const override;
+
+  // Indicates that the encoder should denoise video before encoding it.
+  // If it is not set, the default configuration is used which is different
+  // depending on video codec.
+  absl::optional<bool> needs_denoising() const override;
+
+  SourceState state() const override;
+
+  bool remote() const override;
+
+  // Entry point for captured frames; called by RTCObjCVideoSourceAdapter.
+  void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
+
+  // Called by RTCVideoSource.
+  void OnOutputFormatRequest(int width, int height, int fps);
+
+ private:
+  rtc::VideoBroadcaster broadcaster_;
+  rtc::TimestampAligner timestamp_aligner_;
+
+  RTCObjCVideoSourceAdapter* adapter_;
+  // NOTE(review): the adapter constructor in the .mm leaves this
+  // uninitialized; only the default/bool constructors set it.
+  bool is_screencast_;
+};
+
+} // namespace webrtc
+
+#endif // SDK_OBJC_CLASSES_VIDEO_OBJC_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm
new file mode 100644
index 0000000000..7937e90505
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/native/src/objc_video_track_source.mm
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+// Class extension holding the raw back-pointer to the C++ track source.
+@interface RTCObjCVideoSourceAdapter ()
+@property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource;
+@end
+
+@implementation RTCObjCVideoSourceAdapter
+
+@synthesize objCVideoTrackSource = _objCVideoTrackSource;
+
+// RTCVideoCapturerDelegate: forward every captured frame to the C++ source.
+// NOTE(review): assumes _objCVideoTrackSource outlives this adapter — the
+// pointer is set by ObjCVideoTrackSource's adapter constructor and never
+// cleared; confirm ownership against callers.
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  _objCVideoTrackSource->OnCapturedFrame(frame);
+}
+
+@end
+
+namespace webrtc {
+
+ObjCVideoTrackSource::ObjCVideoTrackSource() : ObjCVideoTrackSource(false) {}
+
+// Passes a required resolution alignment of 2 to the base class so adapted
+// dimensions stay even.
+ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast)
+    : AdaptedVideoTrackSource(/* required resolution alignment */ 2),
+      is_screencast_(is_screencast) {}
+
+// Wires the adapter's back-pointer to this source so captured frames are
+// forwarded here.
+// NOTE(review): this constructor does not delegate to the others, so
+// is_screencast_ is left uninitialized and the base class does not get the
+// alignment-of-2 argument used by the bool constructor — likely a latent bug.
+ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) {
+  adapter_.objCVideoTrackSource = this;
+}
+
+// Returns the value fixed at construction time.
+bool ObjCVideoTrackSource::is_screencast() const {
+  return is_screencast_;
+}
+
+// Explicitly disables encoder denoising (returns false, not absl::nullopt,
+// so the codec's default is overridden).
+absl::optional<bool> ObjCVideoTrackSource::needs_denoising() const {
+  return false;
+}
+
+// This source is always live; there is no ended/initializing state tracking.
+MediaSourceInterface::SourceState ObjCVideoTrackSource::state() const {
+  return SourceState::kLive;
+}
+
+// Locally captured, never a remote source.
+bool ObjCVideoTrackSource::remote() const {
+  return false;
+}
+
+// Requests that captured frames be adapted to at most width x height at the
+// given fps. The trailing 0 is the VideoFormat fourcc — presumably unused for
+// adaptation; confirm against cricket::VideoAdapter.
+void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
+  cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
+  video_adapter()->OnOutputFormatRequest(format);
+}
+
+// Adapts a captured Objective-C frame and delivers it to registered sinks.
+// Flow: translate the capture timestamp onto the local monotonic clock, ask
+// the base class how to scale/crop (dropping the frame if it says so), wrap
+// or convert the pixel data, optionally rotate, then emit via OnFrame().
+void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
+  // Capture timestamps arrive in ns; align them with rtc::TimeMicros() so
+  // downstream consumers see a consistent clock.
+  const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
+  const int64_t translated_timestamp_us =
+      timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
+
+  int adapted_width;
+  int adapted_height;
+  int crop_width;
+  int crop_height;
+  int crop_x;
+  int crop_y;
+  // AdaptFrame() returning false means the frame should be dropped (e.g. for
+  // framerate reduction).
+  if (!AdaptFrame(frame.width,
+                  frame.height,
+                  timestamp_us,
+                  &adapted_width,
+                  &adapted_height,
+                  &crop_width,
+                  &crop_height,
+                  &crop_x,
+                  &crop_y)) {
+    return;
+  }
+
+  // Three paths: pass-through wrap, CVPixelBuffer re-crop (cheap, no copy),
+  // or a full I420 crop-and-scale copy.
+  rtc::scoped_refptr<VideoFrameBuffer> buffer;
+  if (adapted_width == frame.width && adapted_height == frame.height) {
+    // No adaption - optimized path.
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+  } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+    // Adapted CVPixelBuffer frame. The new wrapper shares the underlying
+    // pixel buffer; crop offsets accumulate with any pre-existing crop.
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+        initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
+               adaptedWidth:adapted_width
+              adaptedHeight:adapted_height
+                  cropWidth:crop_width
+                 cropHeight:crop_height
+                      cropX:crop_x + rtcPixelBuffer.cropX
+                      cropY:crop_y + rtcPixelBuffer.cropY]);
+  } else {
+    // Adapted I420 frame.
+    // TODO(magjed): Optimize this I420 path.
+    rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
+    i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
+    buffer = i420_buffer;
+  }
+
+  // Applying rotation is only supported for legacy reasons and performance is
+  // not critical here.
+  VideoRotation rotation = static_cast<VideoRotation>(frame.rotation);
+  if (apply_rotation() && rotation != kVideoRotation_0) {
+    buffer = I420Buffer::Rotate(*buffer->ToI420(), rotation);
+    rotation = kVideoRotation_0;
+  }
+
+  OnFrame(VideoFrame::Builder()
+              .set_video_frame_buffer(buffer)
+              .set_rotation(rotation)
+              .set_timestamp_us(translated_timestamp_us)
+              .build());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
new file mode 100644
index 0000000000..4c8bf348f4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
@@ -0,0 +1,469 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_track_source.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "frame_buffer_helpers.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/fake_video_renderer.h"
+#include "sdk/objc/native/api/video_frame.h"
+
+typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *);
+
+namespace {
+
+// Test sink that converts each delivered native frame back to an Objective-C
+// RTCVideoFrame and hands it to the supplied block, where tests assert on it.
+class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+  ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}
+
+  void OnFrame(const webrtc::VideoFrame &frame) override {
+    callback_(NativeToObjCVideoFrame(frame));
+  }
+
+ private:
+  VideoSinkCallback callback_;
+};
+
+} // namespace
+
+@interface ObjCVideoTrackSourceTests : XCTestCase
+@end
+
+@implementation ObjCVideoTrackSourceTests {
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> _video_source;
+}
+
+// Runs before each test: fresh non-screencast track source.
+- (void)setUp {
+  _video_source = rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>();
+}
+
+// Drop the ref so the source is destroyed between tests.
+- (void)tearDown {
+  _video_source = NULL;
+}
+
+// A 720x1280 frame with a 640x360 output request should be adapted down to
+// portrait 360x640.
+- (void)testOnCapturedFrameAdaptsFrame {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  // NOTE(review): heap-allocated and never deleted — leaks in this test.
+  cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+  XCTAssertEqual(video_renderer->width(), 360);
+  XCTAssertEqual(video_renderer->height(), 640);
+
+  CVBufferRelease(pixelBufferRef);
+}
+
+// Verifies the source's resolution alignment: an unaligned adaptation target
+// still produces even dimensions (360x640 rather than 639x360-ish).
+- (void)testOnCapturedFrameAdaptsFrameWithAlignment {
+  // Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment. The 639 causes issues
+  // with some hardware encoders (e.g. HEVC) so in this test we verify that the alignment is set and
+  // respected.
+
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  // NOTE(review): heap-allocated and never deleted — leaks in this test.
+  cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(912, 514, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+  XCTAssertEqual(video_renderer->width(), 360);
+  XCTAssertEqual(video_renderer->height(), 640);
+
+  CVBufferRelease(pixelBufferRef);
+}
+
+// Verifies the same 720x1280 input scales down to each of the three common
+// target resolutions exactly (not just approximately).
+- (void)testOnCapturedFrameAdaptationResultsInCommonResolutions {
+  // Some of the most common resolutions used in the wild are 640x360, 480x270 and 320x180.
+  // Make sure that we properly scale down to exactly these resolutions.
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  // NOTE(review): heap-allocated and never deleted — leaks in this test.
+  cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  XCTAssertEqual(video_renderer->num_rendered_frames(), 1);
+  XCTAssertEqual(video_renderer->width(), 360);
+  XCTAssertEqual(video_renderer->height(), 640);
+
+  _video_source->OnOutputFormatRequest(480, 270, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  XCTAssertEqual(video_renderer->num_rendered_frames(), 2);
+  XCTAssertEqual(video_renderer->width(), 270);
+  XCTAssertEqual(video_renderer->height(), 480);
+
+  _video_source->OnOutputFormatRequest(320, 180, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  XCTAssertEqual(video_renderer->num_rendered_frames(), 3);
+  XCTAssertEqual(video_renderer->width(), 180);
+  XCTAssertEqual(video_renderer->height(), 320);
+
+  CVBufferRelease(pixelBufferRef);
+}
+
+// A 360x640 frame already matching the 640x360 request must pass through
+// untouched: same dimensions, same crop, same underlying CVPixelBuffer.
+- (void)testOnCapturedFrameWithoutAdaptation {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(frame.width, outputFrame.width);
+    XCTAssertEqual(frame.height, outputFrame.height);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
+    XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// Adapting 720x1280 down to 360x640 must keep the original CVPixelBuffer
+// (re-cropped/scaled wrapper, no pixel copy) with zero crop offsets.
+- (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 360);
+    XCTAssertEqual(outputFrame.height, 640);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(outputBuffer.cropX, 0);
+    XCTAssertEqual(outputBuffer.cropY, 0);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// A 380x640 frame cropped to 360x640 must center the crop horizontally
+// (cropX == 10) while sharing the original CVPixelBuffer.
+- (void)testOnCapturedFrameCVPixelBufferNeedsCropping {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 360);
+    XCTAssertEqual(outputFrame.height, 640);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(outputBuffer.cropX, 10);
+    XCTAssertEqual(outputBuffer.cropY, 0);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// A frame that was already adapted once (720x1280 buffer pre-adapted to
+// 640x360) can be adapted again; crops compound on the same pixel buffer.
+- (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  // Create a frame that's already adapted down.
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:640
+                                                     adaptedHeight:360
+                                                         cropWidth:720
+                                                        cropHeight:1280
+                                                             cropX:0
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 480);
+    XCTAssertEqual(outputFrame.height, 270);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(outputBuffer.cropX, 0);
+    XCTAssertEqual(outputBuffer.cropY, 0);
+    XCTAssertEqual(outputBuffer.cropWidth, 640);
+    XCTAssertEqual(outputBuffer.cropHeight, 360);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(480, 270, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// A pre-cropped frame (cropX == 10) cropped again accumulates the offsets
+// (resulting cropX == 14) while still sharing the original pixel buffer.
+- (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:370
+                                                     adaptedHeight:640
+                                                         cropWidth:370
+                                                        cropHeight:640
+                                                             cropX:10
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 360);
+    XCTAssertEqual(outputFrame.height, 640);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(outputBuffer.cropX, 14);
+    XCTAssertEqual(outputBuffer.cropY, 0);
+    XCTAssertEqual(outputBuffer.cropWidth, 360);
+    XCTAssertEqual(outputBuffer.cropHeight, 640);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// A pre-cropped frame already narrower (300 wide) than the 360 target keeps
+// its width and is cropped vertically instead (534 tall, cropY == 52).
+- (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping {
+  CVPixelBufferRef pixelBufferRef = NULL;
+  CVPixelBufferCreate(
+      NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:300
+                                                     adaptedHeight:640
+                                                         cropWidth:300
+                                                        cropHeight:640
+                                                             cropX:40
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 300);
+    XCTAssertEqual(outputFrame.height, 534);
+
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
+    XCTAssertEqual(outputBuffer.cropX, 40);
+    XCTAssertEqual(outputBuffer.cropY, 52);
+    XCTAssertEqual(outputBuffer.cropWidth, 300);
+    XCTAssertEqual(outputBuffer.cropHeight, 534);
+    XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+  CVBufferRelease(pixelBufferRef);
+}
+
+// I420 (non-CVPixelBuffer) path: 720x1280 gradient scaled to 360x640 must be
+// pixel-perfect (PSNR == kPerfectPSNR for a clean 2x downscale of a gradient).
+- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
+  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
+  RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 360);
+    XCTAssertEqual(outputFrame.height, 640);
+
+    RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
+
+    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
+    XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+// I420 path with cropping: 380x640 gradient cropped to 360x640; cropping a
+// gradient is lossy, so only require PSNR >= 40 rather than a perfect match.
+- (void)testOnCapturedFrameI420BufferNeedsCropping {
+  rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
+  RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
+
+  XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
+    XCTAssertEqual(outputFrame.width, 360);
+    XCTAssertEqual(outputFrame.height, 640);
+
+    RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
+
+    double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
+    XCTAssertGreaterThanOrEqual(psnr, 40);
+
+    [callbackExpectation fulfill];
+  });
+
+  const rtc::VideoSinkWants video_sink_wants;
+  rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
+  video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
+
+  _video_source->OnOutputFormatRequest(640, 360, 30);
+  _video_source->OnCapturedFrame(frame);
+
+  [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm
new file mode 100644
index 0000000000..f8ce844652
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm
@@ -0,0 +1,593 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#if defined(WEBRTC_IOS)
+#import "sdk/objc/native/api/audio_device_module.h"
+#endif
+
+#include "api/scoped_refptr.h"
+
+// Block types mirroring the two webrtc::AudioTransport callback signatures,
+// so tests can install per-test expectations on MockAudioTransport below.
+typedef int32_t(^NeedMorePlayDataBlock)(const size_t nSamples,
+                                        const size_t nBytesPerSample,
+                                        const size_t nChannels,
+                                        const uint32_t samplesPerSec,
+                                        void* audioSamples,
+                                        size_t& nSamplesOut,
+                                        int64_t* elapsed_time_ms,
+                                        int64_t* ntp_time_ms);
+
+typedef int32_t(^RecordedDataIsAvailableBlock)(const void* audioSamples,
+                                               const size_t nSamples,
+                                               const size_t nBytesPerSample,
+                                               const size_t nChannels,
+                                               const uint32_t samplesPerSec,
+                                               const uint32_t totalDelayMS,
+                                               const int32_t clockDrift,
+                                               const uint32_t currentMicLevel,
+                                               const bool keyPressed,
+                                               uint32_t& newMicLevel);
+
+
+// This class implements the AudioTransport API and forwards all methods to the appropriate blocks.
+class MockAudioTransport : public webrtc::AudioTransport {
+public:
+ MockAudioTransport() {}
+ ~MockAudioTransport() override {}
+
+ void expectNeedMorePlayData(NeedMorePlayDataBlock block) {
+ needMorePlayDataBlock = block;
+ }
+
+ void expectRecordedDataIsAvailable(RecordedDataIsAvailableBlock block) {
+ recordedDataIsAvailableBlock = block;
+ }
+
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
+ return needMorePlayDataBlock(nSamples,
+ nBytesPerSample,
+ nChannels,
+ samplesPerSec,
+ audioSamples,
+ nSamplesOut,
+ elapsed_time_ms,
+ ntp_time_ms);
+ }
+
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
+ return recordedDataIsAvailableBlock(audioSamples,
+ nSamples,
+ nBytesPerSample,
+ nChannels,
+ samplesPerSec,
+ totalDelayMS,
+ clockDrift,
+ currentMicLevel,
+ keyPressed,
+ newMicLevel);
+ }
+
+ void PullRenderData(int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {}
+
+ private:
+ NeedMorePlayDataBlock needMorePlayDataBlock;
+ RecordedDataIsAvailableBlock recordedDataIsAvailableBlock;
+};
+
+// --- Test tuning constants -------------------------------------------------
+// Number of callbacks (input or output) the tests waits for before we set
+// an event indicating that the test was OK.
+static const NSUInteger kNumCallbacks = 10;
+// Max amount of time we wait for an event to be set while counting callbacks.
+static const NSTimeInterval kTestTimeOutInSec = 20.0;
+// Number of bits per PCM audio sample.
+static const NSUInteger kBitsPerSample = 16;
+// Number of bytes per PCM audio sample.
+static const NSUInteger kBytesPerSample = kBitsPerSample / 8;
+// Average number of audio callbacks per second assuming 10ms packet size.
+static const NSUInteger kNumCallbacksPerSecond = 100;
+// Play out a test file during this time (unit is in seconds).
+static const NSUInteger kFilePlayTimeInSec = 15;
+// Run the full-duplex test during this time (unit is in seconds).
+// Note that first `kNumIgnoreFirstCallbacks` are ignored.
+static const NSUInteger kFullDuplexTimeInSec = 10;
+// Wait for the callback sequence to stabilize by ignoring this amount of the
+// initial callbacks (avoids initial FIFO access).
+// Only used in the RunPlayoutAndRecordingInFullDuplex test.
+static const NSUInteger kNumIgnoreFirstCallbacks = 50;
+
+// Exercises the platform AudioDeviceModule end to end (init, playout,
+// recording). The ADM and mock transport live as ivars so setUp/tearDown can
+// create and terminate them around every test.
+@interface RTCAudioDeviceModuleTests : XCTestCase {
+  rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule;
+  MockAudioTransport mock;
+}
+
+@property(nonatomic, assign) webrtc::AudioParameters playoutParameters;
+@property(nonatomic, assign) webrtc::AudioParameters recordParameters;
+
+@end
+
+@implementation RTCAudioDeviceModuleTests
+
+@synthesize playoutParameters;
+@synthesize recordParameters;
+
+// Creates and initializes a fresh ADM before each test and caches its
+// playout/record parameters for use in the callback assertions.
+- (void)setUp {
+  [super setUp];
+  audioDeviceModule = webrtc::CreateAudioDeviceModule();
+  XCTAssertEqual(0, audioDeviceModule->Init());
+  XCTAssertEqual(0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters));
+  XCTAssertEqual(0, audioDeviceModule->GetRecordAudioParameters(&recordParameters));
+}
+
+// Terminates and releases the ADM after each test.
+- (void)tearDown {
+  XCTAssertEqual(0, audioDeviceModule->Terminate());
+  audioDeviceModule = nullptr;
+  [super tearDown];
+}
+
+// Helper: init + start playout, asserting each state transition.
+- (void)startPlayout {
+  XCTAssertFalse(audioDeviceModule->Playing());
+  XCTAssertEqual(0, audioDeviceModule->InitPlayout());
+  XCTAssertTrue(audioDeviceModule->PlayoutIsInitialized());
+  XCTAssertEqual(0, audioDeviceModule->StartPlayout());
+  XCTAssertTrue(audioDeviceModule->Playing());
+}
+
+// Helper: stop playout and verify the module reports not playing.
+- (void)stopPlayout {
+  XCTAssertEqual(0, audioDeviceModule->StopPlayout());
+  XCTAssertFalse(audioDeviceModule->Playing());
+}
+
+// Helper: init + start recording, asserting each state transition.
+- (void)startRecording{
+  XCTAssertFalse(audioDeviceModule->Recording());
+  XCTAssertEqual(0, audioDeviceModule->InitRecording());
+  XCTAssertTrue(audioDeviceModule->RecordingIsInitialized());
+  XCTAssertEqual(0, audioDeviceModule->StartRecording());
+  XCTAssertTrue(audioDeviceModule->Recording());
+}
+
+// Helper: stop recording and verify the module reports not recording.
+- (void)stopRecording{
+  XCTAssertEqual(0, audioDeviceModule->StopRecording());
+  XCTAssertFalse(audioDeviceModule->Recording());
+}
+
+// Maps a supported sample rate (48k/44.1k/16k) to the bundled PCM fixture
+// named audio_short48/audio_short44/audio_short16 (.pcm).
+- (NSURL*)fileURLForSampleRate:(int)sampleRate {
+  XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 || sampleRate == 16000);
+  NSString *filename = [NSString stringWithFormat:@"audio_short%d", sampleRate / 1000];
+  NSURL *url = [[NSBundle mainBundle] URLForResource:filename withExtension:@"pcm"];
+  XCTAssertNotNil(url);
+
+  return url;
+}
+
+#pragma mark - Tests
+
+- (void)testConstructDestruct {
+  // Using the test fixture to create and destruct the audio device module.
+}
+
+// Terminate() must flip Initialized() back to false (Init ran in setUp).
+- (void)testInitTerminate {
+  // Initialization is part of the test fixture.
+  XCTAssertTrue(audioDeviceModule->Initialized());
+  XCTAssertEqual(0, audioDeviceModule->Terminate());
+  XCTAssertFalse(audioDeviceModule->Initialized());
+}
+
+// Tests that playout can be initiated, started and stopped. No audio callback
+// is registered in this test. Done twice to verify restart works.
+- (void)testStartStopPlayout {
+  [self startPlayout];
+  [self stopPlayout];
+  [self startPlayout];
+  [self stopPlayout];
+}
+
+// Tests that recording can be initiated, started and stopped. No audio callback
+// is registered in this test. Done twice to verify restart works.
+- (void)testStartStopRecording {
+  [self startRecording];
+  [self stopRecording];
+  [self startRecording];
+  [self stopRecording];
+}
+// Verify that calling StopPlayout() will leave us in an uninitialized state
+// which will require a new call to InitPlayout(). This test does not call
+// StartPlayout() while being uninitialized since doing so will hit a
+// RTC_DCHECK.
+- (void)testStopPlayoutRequiresInitToRestart {
+ XCTAssertEqual(0, audioDeviceModule->InitPlayout());
+ XCTAssertEqual(0, audioDeviceModule->StartPlayout());
+ XCTAssertEqual(0, audioDeviceModule->StopPlayout());
+ XCTAssertFalse(audioDeviceModule->PlayoutIsInitialized());
+}
+
+// Verify that we can create two ADMs and start playing on the second ADM.
+// Only the first active instance shall activate an audio session and the
+// last active instance shall deactivate the audio session. The test does not
+// explicitly verify correct audio session calls but instead focuses on
+// ensuring that audio starts for both ADMs.
+- (void)testStartPlayoutOnTwoInstances {
+ // Create and initialize a second/extra ADM instance. The default ADM is
+ // created by the test harness.
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> secondAudioDeviceModule =
+ webrtc::CreateAudioDeviceModule();
+ XCTAssertNotEqual(secondAudioDeviceModule.get(), nullptr);
+ XCTAssertEqual(0, secondAudioDeviceModule->Init());
+
+ // Start playout for the default ADM but don't wait here. Instead use the
+ // upcoming second stream for that. We set the same expectation on number
+ // of callbacks as for the second stream.
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+
+ // Initialize playout for the second ADM. If all is OK, the second ADM shall
+ // reuse the audio session activated when the first ADM started playing.
+ // This call will also ensure that we avoid a problem related to initializing
+ // two different audio unit instances back to back (see webrtc:5166 for
+ // details).
+ XCTAssertEqual(0, secondAudioDeviceModule->InitPlayout());
+ XCTAssertTrue(secondAudioDeviceModule->PlayoutIsInitialized());
+
+ // Start playout for the second ADM and verify that it starts as intended.
+ // Passing this test ensures that initialization of the second audio unit
+ // has been done successfully and that there is no conflict with the already
+ // playing first ADM.
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block int num_callbacks = 0;
+
+ MockAudioTransport mock2;
+ mock2.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (++num_callbacks == kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, secondAudioDeviceModule->RegisterAudioCallback(&mock2));
+ XCTAssertEqual(0, secondAudioDeviceModule->StartPlayout());
+ XCTAssertTrue(secondAudioDeviceModule->Playing());
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopPlayout];
+ XCTAssertEqual(0, secondAudioDeviceModule->StopPlayout());
+ XCTAssertFalse(secondAudioDeviceModule->Playing());
+ XCTAssertFalse(secondAudioDeviceModule->PlayoutIsInitialized());
+
+ XCTAssertEqual(0, secondAudioDeviceModule->Terminate());
+}
+
+// Start playout and verify that the native audio layer starts asking for real
+// audio samples to play out using the NeedMorePlayData callback.
+- (void)testStartPlayoutVerifyCallbacks {
+
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block int num_callbacks = 0;
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (++num_callbacks == kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+
+ [self startPlayout];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopPlayout];
+}
+
+// Start recording and verify that the native audio layer starts feeding real
+// audio samples via the RecordedDataIsAvailable callback.
+- (void)testStartRecordingVerifyCallbacks {
+ XCTestExpectation *recordExpectation =
+ [self expectationWithDescription:@"RecordedDataIsAvailable"];
+ __block int num_callbacks = 0;
+
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.recordParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
+ XCTAssertEqual(0, clockDrift);
+ XCTAssertEqual(0u, currentMicLevel);
+ XCTAssertFalse(keyPressed);
+ if (++num_callbacks == kNumCallbacks) {
+ [recordExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startRecording];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopRecording];
+}
+
+// Start playout and recording (full-duplex audio) and verify that audio is
+// active in both directions.
+- (void)testStartPlayoutAndRecordingVerifyCallbacks {
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block NSUInteger callbackCount = 0;
+
+ XCTestExpectation *recordExpectation =
+ [self expectationWithDescription:@"RecordedDataIsAvailable"];
+ recordExpectation.expectedFulfillmentCount = kNumCallbacks;
+
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.playoutParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ if (callbackCount++ >= kNumCallbacks) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ XCTAssertNotEqual((void*)NULL, audioSamples);
+ XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
+ XCTAssertEqual(nBytesPerSample, kBytesPerSample);
+ XCTAssertEqual(nChannels, self.recordParameters.channels());
+ XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
+ XCTAssertEqual(0, clockDrift);
+ XCTAssertEqual(0u, currentMicLevel);
+ XCTAssertFalse(keyPressed);
+ [recordExpectation fulfill];
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+ [self startRecording];
+ [self waitForExpectationsWithTimeout:kTestTimeOutInSec handler:nil];
+ [self stopRecording];
+ [self stopPlayout];
+}
+
+// Start playout and read audio from an external PCM file when the audio layer
+// asks for data to play out. Real audio is played out in this test but it does
+// not contain any explicit verification that the audio quality is perfect.
+- (void)testRunPlayoutWithFileAsSource {
+ XCTAssertEqual(1u, playoutParameters.channels());
+
+ // Using XCTestExpectation to count callbacks is very slow.
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ const int expectedCallbackCount = kFilePlayTimeInSec * kNumCallbacksPerSecond;
+ __block int callbackCount = 0;
+
+ NSURL *fileURL = [self fileURLForSampleRate:playoutParameters.sample_rate()];
+ NSInputStream *inputStream = [[NSInputStream alloc] initWithURL:fileURL];
+
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ [inputStream read:(uint8_t *)audioSamples maxLength:nSamples*nBytesPerSample*nChannels];
+ nSamplesOut = nSamples;
+ if (callbackCount++ == expectedCallbackCount) {
+ [playoutExpectation fulfill];
+ }
+
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startPlayout];
+ NSTimeInterval waitTimeout = kFilePlayTimeInSec * 2.0;
+ [self waitForExpectationsWithTimeout:waitTimeout handler:nil];
+ [self stopPlayout];
+}
+
+- (void)testDevices {
+ // Device enumeration is not supported. Verify fixed values only.
+ XCTAssertEqual(1, audioDeviceModule->PlayoutDevices());
+ XCTAssertEqual(1, audioDeviceModule->RecordingDevices());
+}
+
+// Start playout and recording and store recorded data in an intermediate FIFO
+// buffer from which the playout side then reads its samples in the same order
+// as they were stored. Under ideal circumstances, a callback sequence would
+// look like: ...+-+-+-+-+-+-+-..., where '+' means 'packet recorded' and '-'
+// means 'packet played'. Under such conditions, the FIFO would only contain
+// one packet on average. However, under more realistic conditions, the size
+// of the FIFO will vary more due to an unbalance between the two sides.
+// This test tries to verify that the device maintains a balanced callback-
+// sequence by running in loopback for ten seconds while measuring the size
+// (max and average) of the FIFO. The size of the FIFO is increased by the
+// recording side and decreased by the playout side.
+// TODO(henrika): tune the final test parameters after running tests on several
+// different devices.
+- (void)testRunPlayoutAndRecordingInFullDuplex {
+ XCTAssertEqual(recordParameters.channels(), playoutParameters.channels());
+ XCTAssertEqual(recordParameters.sample_rate(), playoutParameters.sample_rate());
+
+ XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
+ __block NSUInteger playoutCallbacks = 0;
+ NSUInteger expectedPlayoutCallbacks = kFullDuplexTimeInSec * kNumCallbacksPerSecond;
+
+ // FIFO queue and measurements
+ NSMutableArray *fifoBuffer = [NSMutableArray arrayWithCapacity:20];
+ __block NSUInteger fifoMaxSize = 0;
+ __block NSUInteger fifoTotalWrittenElements = 0;
+ __block NSUInteger fifoWriteCount = 0;
+
+ mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) {
+ if (fifoWriteCount++ < kNumIgnoreFirstCallbacks) {
+ return 0;
+ }
+
+ NSData *data = [NSData dataWithBytes:audioSamples length:nSamples*nBytesPerSample*nChannels];
+ @synchronized(fifoBuffer) {
+ [fifoBuffer addObject:data];
+ fifoMaxSize = MAX(fifoMaxSize, fifoBuffer.count);
+ fifoTotalWrittenElements += fifoBuffer.count;
+ }
+
+ return 0;
+ });
+
+ mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void *audioSamples,
+ size_t &nSamplesOut,
+ int64_t *elapsed_time_ms,
+ int64_t *ntp_time_ms) {
+ nSamplesOut = nSamples;
+ NSData *data;
+ @synchronized(fifoBuffer) {
+ data = fifoBuffer.firstObject;
+ if (data) {
+ [fifoBuffer removeObjectAtIndex:0];
+ }
+ }
+
+ if (data) {
+ memcpy(audioSamples, (char*) data.bytes, data.length);
+ } else {
+ memset(audioSamples, 0, nSamples*nBytesPerSample*nChannels);
+ }
+
+ if (playoutCallbacks++ == expectedPlayoutCallbacks) {
+ [playoutExpectation fulfill];
+ }
+ return 0;
+ });
+
+ XCTAssertEqual(0, audioDeviceModule->RegisterAudioCallback(&mock));
+ [self startRecording];
+ [self startPlayout];
+ NSTimeInterval waitTimeout = kFullDuplexTimeInSec * 2.0;
+ [self waitForExpectationsWithTimeout:waitTimeout handler:nil];
+
+ size_t fifoAverageSize =
+ (fifoTotalWrittenElements == 0)
+ ? 0.0
+ : 0.5 + (double)fifoTotalWrittenElements / (fifoWriteCount - kNumIgnoreFirstCallbacks);
+
+ [self stopPlayout];
+ [self stopRecording];
+ XCTAssertLessThan(fifoAverageSize, 10u);
+ XCTAssertLessThan(fifoMaxSize, 20u);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm
new file mode 100644
index 0000000000..e01fdbd6e3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioDevice_xctest.mm
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#include "api/task_queue/default_task_queue_factory.h"
+
+#import "sdk/objc/components/audio/RTCAudioSession+Private.h"
+#import "sdk/objc/native/api/audio_device_module.h"
+#import "sdk/objc/native/src/audio/audio_device_ios.h"
+
+@interface RTCAudioDeviceTests : XCTestCase {
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
+ std::unique_ptr<webrtc::ios_adm::AudioDeviceIOS> _audio_device;
+}
+
+@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession;
+
+@end
+
+@implementation RTCAudioDeviceTests
+
+@synthesize audioSession = _audioSession;
+
+- (void)setUp {
+ [super setUp];
+
+ _audioDeviceModule = webrtc::CreateAudioDeviceModule();
+ _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false));
+ self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+
+ NSError *error = nil;
+ [self.audioSession lockForConfiguration];
+ [self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession setMode:AVAudioSessionModeVoiceChat error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession setActive:YES error:&error];
+ XCTAssertNil(error);
+
+ [self.audioSession unlockForConfiguration];
+}
+
+- (void)tearDown {
+ _audio_device->Terminate();
+ _audio_device.reset(nullptr);
+ _audioDeviceModule = nullptr;
+ [self.audioSession notifyDidEndInterruptionWithShouldResumeSession:NO];
+
+ [super tearDown];
+}
+
+// Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly
+// after an iOS AVAudioSessionInterruptionTypeEnded notification event.
+// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by:
+// - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_
+// callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list.
+// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it
+// passes the notification to callbacks in its delegate list which sets
+// AudioDeviceIOS's is_interrupted_ flag to true.
+// - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its
+// audio_session_observer_ callback is removed from RTCAudioSessions's
+// delegate list.
+// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification,
+// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s
+// delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in
+// the wrong (true) state and the audio session will ignore audio changes.
+// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize
+// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted
+// flag in AudioDeviceIOS.InitPlayOrRecord.
+- (void)testInterruptedAudioSession {
+ XCTAssertTrue(self.audioSession.isActive);
+ XCTAssertTrue([self.audioSession.category isEqual:AVAudioSessionCategoryPlayAndRecord] ||
+ [self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]);
+ XCTAssertEqual(AVAudioSessionModeVoiceChat, self.audioSession.mode);
+
+ std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
+ webrtc::CreateDefaultTaskQueueFactory();
+ std::unique_ptr<webrtc::AudioDeviceBuffer> audio_buffer;
+ audio_buffer.reset(new webrtc::AudioDeviceBuffer(task_queue_factory.get()));
+ _audio_device->AttachAudioBuffer(audio_buffer.get());
+ XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK, _audio_device->Init());
+ XCTAssertEqual(0, _audio_device->InitPlayout());
+ XCTAssertEqual(0, _audio_device->StartPlayout());
+
+ // Force interruption.
+ [self.audioSession notifyDidBeginInterruption];
+
+ // Wait for notification to propagate.
+ rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
+ XCTAssertTrue(_audio_device->IsInterrupted());
+
+ // Force it for testing.
+ _audio_device->StopPlayout();
+
+ [self.audioSession notifyDidEndInterruptionWithShouldResumeSession:YES];
+ // Wait for notification to propagate.
+ rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
+ XCTAssertTrue(_audio_device->IsInterrupted());
+
+ _audio_device->Init();
+ _audio_device->InitPlayout();
+ XCTAssertFalse(_audio_device->IsInterrupted());
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm
new file mode 100644
index 0000000000..f62eb46bd5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCAudioSessionTest.mm
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/event.h"
+#include "rtc_base/gunit.h"
+
+#import "components/audio/RTCAudioSession+Private.h"
+
+#import "components/audio/RTCAudioSession.h"
+#import "components/audio/RTCAudioSessionConfiguration.h"
+
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(UnitTesting)
+
+ @property(nonatomic,
+ readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
+
+- (instancetype)initWithAudioSession:(id)audioSession;
+
+@end
+
+@interface MockAVAudioSession : NSObject
+
+@property (nonatomic, readwrite, assign) float outputVolume;
+
+@end
+
+@implementation MockAVAudioSession
+@synthesize outputVolume = _outputVolume;
+@end
+
+@interface RTCAudioSessionTestDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+
+@property (nonatomic, readonly) float outputVolume;
+
+@end
+
+@implementation RTCAudioSessionTestDelegate
+
+@synthesize outputVolume = _outputVolume;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ _outputVolume = -1;
+ }
+ return self;
+}
+
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ shouldResumeSession:(BOOL)shouldResumeSession {
+}
+
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+ reason:(AVAudioSessionRouteChangeReason)reason
+ previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+}
+
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+}
+
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+ didChangeOutputVolume:(float)outputVolume {
+ _outputVolume = outputVolume;
+}
+
+@end
+
+// A delegate that adds itself to the audio session on init and removes itself
+// in its dealloc.
+@interface RTCTestRemoveOnDeallocDelegate : RTCAudioSessionTestDelegate
+@end
+
+@implementation RTCTestRemoveOnDeallocDelegate
+
+- (instancetype)init {
+ if (self = [super init]) {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session addDelegate:self];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ [session removeDelegate:self];
+}
+
+@end
+
+@interface RTCAudioSessionTest : XCTestCase
+
+@end
+
+@implementation RTCAudioSessionTest
+
+- (void)testAddAndRemoveDelegates {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ NSMutableArray *delegates = [NSMutableArray array];
+ const size_t count = 5;
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ EXPECT_EQ(i + 1, session.delegates.size());
+ }
+ [delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj,
+ NSUInteger idx,
+ BOOL *stop) {
+ [session removeDelegate:obj];
+ }];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
+- (void)testPushDelegate {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ NSMutableArray *delegates = [NSMutableArray array];
+ const size_t count = 2;
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ }
+ // Test that it gets added to the front of the list.
+ RTCAudioSessionTestDelegate *pushedDelegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session pushDelegate:pushedDelegate];
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+
+ // Test that it stays at the front of the list.
+ for (size_t i = 0; i < count; ++i) {
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ [delegates addObject:delegate];
+ }
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+
+ // Test that the next one goes to the front too.
+ pushedDelegate = [[RTCAudioSessionTestDelegate alloc] init];
+ [session pushDelegate:pushedDelegate];
+ EXPECT_TRUE(pushedDelegate == session.delegates[0]);
+}
+
+// Tests that delegates added to the audio session properly zero out. This is
+// checking an implementation detail (that vectors of __weak work as expected).
+- (void)testZeroingWeakDelegate {
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ @autoreleasepool {
+ // Add a delegate to the session. There should be one delegate at this
+ // point.
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ EXPECT_EQ(1u, session.delegates.size());
+ EXPECT_TRUE(session.delegates[0]);
+ }
+ // The previously created delegate should've de-alloced, leaving a nil ptr.
+ EXPECT_FALSE(session.delegates[0]);
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+ // On adding a new delegate, nil ptrs should've been cleared.
+ EXPECT_EQ(1u, session.delegates.size());
+ EXPECT_TRUE(session.delegates[0]);
+}
+
+// Tests that we don't crash when removing delegates in dealloc.
+// Added as a regression test.
+- (void)testRemoveDelegateOnDealloc {
+ @autoreleasepool {
+ RTCTestRemoveOnDeallocDelegate *delegate =
+ [[RTCTestRemoveOnDeallocDelegate alloc] init];
+ EXPECT_TRUE(delegate);
+ }
+ RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
+- (void)testAudioSessionActivation {
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+ EXPECT_EQ(0, audioSession.activationCount);
+ [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
+ EXPECT_EQ(1, audioSession.activationCount);
+ [audioSession audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+ EXPECT_EQ(0, audioSession.activationCount);
+}
+
+// Hack - fixes OCMVerify link error
+// Link error is: Undefined symbols for architecture i386:
+// "OCMMakeLocation(objc_object*, char const*, int)", referenced from:
+// -[RTCAudioSessionTest testConfigureWebRTCSession] in RTCAudioSessionTest.o
+// ld: symbol(s) not found for architecture i386
+// REASON: https://github.com/erikdoe/ocmock/issues/238
+OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){
+ return [OCMLocation locationWithTestCase:testCase
+ file:[NSString stringWithUTF8String:fileCString]
+ line:line];
+}
+
+- (void)testConfigureWebRTCSession {
+ NSError *error = nil;
+
+ void (^setActiveBlock)(NSInvocation *invocation) = ^(NSInvocation *invocation) {
+ __autoreleasing NSError **retError;
+ [invocation getArgument:&retError atIndex:4];
+ *retError = [NSError errorWithDomain:@"AVAudioSession"
+ code:AVAudioSessionErrorCodeCannotInterruptOthers
+ userInfo:nil];
+ BOOL failure = NO;
+ [invocation setReturnValue:&failure];
+ };
+
+ id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
+ OCMStub([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
+ withOptions:0
+ error:([OCMArg anyObjectRef])])
+ .andDo(setActiveBlock);
+
+ id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
+ OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
+
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
+ EXPECT_EQ(0, audioSession.activationCount);
+ [audioSession lockForConfiguration];
+ // configureWebRTCSession is forced to fail in the above mock interface,
+ // so activationCount should remain 0
+ OCMExpect([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
+ withOptions:0
+ error:([OCMArg anyObjectRef])])
+ .andDo(setActiveBlock);
+ OCMExpect([mockAudioSession session]).andReturn(mockAVAudioSession);
+ EXPECT_FALSE([audioSession configureWebRTCSession:&error]);
+ EXPECT_EQ(0, audioSession.activationCount);
+
+ id session = audioSession.session;
+ EXPECT_EQ(session, mockAVAudioSession);
+ EXPECT_EQ(NO, [mockAVAudioSession setActive:YES withOptions:0 error:&error]);
+ [audioSession unlockForConfiguration];
+
+ OCMVerify([mockAudioSession session]);
+ OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES withOptions:0 error:&error]);
+ OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO withOptions:0 error:&error]);
+
+ [mockAVAudioSession stopMocking];
+ [mockAudioSession stopMocking];
+}
+
+- (void)testConfigureWebRTCSessionWithoutLocking {
+ NSError *error = nil;
+
+ id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
+ id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
+ OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
+
+ RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
+
+ std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
+ EXPECT_TRUE(thread);
+ EXPECT_TRUE(thread->Start());
+
+ rtc::Event waitLock;
+ rtc::Event waitCleanup;
+ constexpr int timeoutMs = 5000;
+ thread->PostTask([audioSession, &waitLock, &waitCleanup] {
+ [audioSession lockForConfiguration];
+ waitLock.Set();
+ waitCleanup.Wait(timeoutMs);
+ [audioSession unlockForConfiguration];
+ });
+
+ waitLock.Wait(timeoutMs);
+ [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
+ EXPECT_TRUE(error != nil);
+ EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain);
+ EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired);
+ waitCleanup.Set();
+ thread->Stop();
+
+ [mockAVAudioSession stopMocking];
+ [mockAudioSession stopMocking];
+}
+
+- (void)testAudioVolumeDidNotify {
+ MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init];
+ RTC_OBJC_TYPE(RTCAudioSession) *session =
+ [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession];
+ RTCAudioSessionTestDelegate *delegate =
+ [[RTCAudioSessionTestDelegate alloc] init];
+ [session addDelegate:delegate];
+
+ float expectedVolume = 0.75;
+ mockAVAudioSession.outputVolume = expectedVolume;
+
+ EXPECT_EQ(expectedVolume, delegate.outputVolume);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
new file mode 100644
index 0000000000..3a1ab24773
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
@@ -0,0 +1,308 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer+Private.h"
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "frame_buffer_helpers.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "third_party/libyuv/include/libyuv.h"
+
+@interface RTCCVPixelBufferTests : XCTestCase
+@end
+
+@implementation RTCCVPixelBufferTests {
+}
+
+- (void)testRequiresCroppingNoCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+
+ XCTAssertFalse([buffer requiresCropping]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresCroppingWithCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:720
+ adaptedHeight:1280
+ cropWidth:360
+ cropHeight:640
+ cropX:100
+ cropY:100];
+
+ XCTAssertTrue([croppedBuffer requiresCropping]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingNoScale {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingWithScale {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testRequiresScalingWithScaleAndMatchingCrop {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:720
+ adaptedHeight:1280
+ cropWidth:360
+ cropHeight:640
+ cropX:100
+ cropY:100];
+ XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testBufferSize_NV12 {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testBufferSize_RGB {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)testCropAndScale_NV12 {
+ [self cropAndScaleTestWithNV12];
+}
+
+- (void)testCropAndScaleNoOp_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(720, 1280)];
+}
+
+- (void)testCropAndScale_NV12FullToVideo {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)testCropAndScaleZeroSizeFrame_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(0, 0)];
+}
+
+- (void)testCropAndScaleToSmallFormat_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(148, 320)];
+}
+
+- (void)testCropAndScaleToOddFormat_NV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputSize:CGSizeMake(361, 640)];
+}
+
+- (void)testCropAndScale_32BGRA {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32BGRA];
+}
+
+- (void)testCropAndScale_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB];
+}
+
+- (void)testCropAndScaleWithSmallCropInfo_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:2 cropY:3];
+}
+
+- (void)testCropAndScaleWithLargeCropInfo_32ARGB {
+ [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:200 cropY:300];
+}
+
+- (void)testToI420_NV12 {
+ [self toI420WithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)testToI420_32BGRA {
+ [self toI420WithPixelFormat:kCVPixelFormatType_32BGRA];
+}
+
+- (void)testToI420_32ARGB {
+ [self toI420WithPixelFormat:kCVPixelFormatType_32ARGB];
+}
+
+#pragma mark - Shared test code
+
+- (void)cropAndScaleTestWithNV12 {
+ [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
+}
+
+- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat outputFormat:(OSType)outputFormat {
+ [self cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
+ outputFormat:(OSType)outputFormat
+ outputSize:CGSizeMake(360, 640)];
+}
+
+- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
+ outputFormat:(OSType)outputFormat
+ outputSize:(CGSize)outputSize {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, inputFormat, NULL, &pixelBufferRef);
+
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
+ CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ XCTAssertEqual(buffer.width, 720);
+ XCTAssertEqual(buffer.height, 1280);
+
+ CVPixelBufferRef outputPixelBufferRef = NULL;
+ CVPixelBufferCreate(
+ NULL, outputSize.width, outputSize.height, outputFormat, NULL, &outputPixelBufferRef);
+
+ std::vector<uint8_t> frameScaleBuffer;
+ if ([buffer requiresScalingToWidth:outputSize.width height:outputSize.height]) {
+ int size =
+ [buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width height:outputSize.height];
+ frameScaleBuffer.resize(size);
+ } else {
+ frameScaleBuffer.clear();
+ }
+ frameScaleBuffer.shrink_to_fit();
+
+ [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
+ XCTAssertEqual(scaledBuffer.width, outputSize.width);
+ XCTAssertEqual(scaledBuffer.height, outputSize.height);
+
+ if (outputSize.width > 0 && outputSize.height > 0) {
+ RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+ RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
+ double psnr =
+ I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
+ XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+ }
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat {
+ [self cropAndScaleTestWithRGBPixelFormat:pixelFormat cropX:0 cropY:0];
+}
+
+- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX cropY:(int)cropY {
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef);
+
+ DrawGradientInRGBPixelBuffer(pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+ initWithPixelBuffer:pixelBufferRef
+ adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
+ adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
+ cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
+ cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
+ cropX:cropX
+ cropY:cropY];
+
+ XCTAssertEqual(buffer.width, 720);
+ XCTAssertEqual(buffer.height, 1280);
+
+ CVPixelBufferRef outputPixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
+ [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
+ XCTAssertEqual(scaledBuffer.width, 360);
+ XCTAssertEqual(scaledBuffer.height, 640);
+
+ RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+ RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
+ double psnr =
+ I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
+ XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+- (void)toI420WithPixelFormat:(OSType)pixelFormat {
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);
+
+ CVPixelBufferRef pixelBufferRef = NULL;
+ CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef);
+
+ CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
+
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+ RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420];
+
+ double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
+ double target = webrtc::kPerfectPSNR;
+ if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
+ // libyuv's I420ToRGB functions seem to lose some quality.
+ target = 19.0;
+ }
+ XCTAssertGreaterThanOrEqual(psnr, target);
+
+ CVBufferRelease(pixelBufferRef);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m
new file mode 100644
index 0000000000..1b6fb1c07b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCallbackLogger_xctest.m
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/logging/RTCCallbackLogger.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCCallbackLoggerTests : XCTestCase
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger;
+
+@end
+
+@implementation RTCCallbackLoggerTests
+
+@synthesize logger;
+
+- (void)setUp {
+ self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
+}
+
+- (void)tearDown {
+ self.logger = nil;
+}
+
+- (void)testDefaultSeverityLevel {
+ XCTAssertEqual(self.logger.severity, RTCLoggingSeverityInfo);
+}
+
+- (void)testCallbackGetsCalledForAppropriateLevel {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger start:^(NSString *message) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackWithSeverityGetsCalledForAppropriateLevel {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(severity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackDoesNotGetCalledForOtherLevels {
+ self.logger.severity = RTCLoggingSeverityError;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+
+ [self.logger start:^(NSString *message) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackWithSeverityDoesNotGetCalledForOtherLevels {
+ self.logger.severity = RTCLoggingSeverityError;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(severity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+- (void)testCallbackDoesNotgetCalledForSeverityNone {
+ self.logger.severity = RTCLoggingSeverityNone;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testCallbackWithSeverityDoesNotgetCalledForSeverityNone {
+ self.logger.severity = RTCLoggingSeverityNone;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogInfo("Just some info");
+ RTCLogWarning("Warning warning");
+ RTCLogError("Horrible error");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testStartingWithNilCallbackDoesNotCrash {
+ [self.logger start:nil];
+
+ RTCLogError("Horrible error");
+}
+
+- (void)testStartingWithNilCallbackWithSeverityDoesNotCrash {
+ [self.logger startWithMessageAndSeverityHandler:nil];
+
+ RTCLogError("Horrible error");
+}
+
+- (void)testStopCallbackLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger stop];
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testStopCallbackWithSeverityLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger stop];
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testDestroyingCallbackLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ }];
+
+ self.logger = nil;
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testDestroyingCallbackWithSeverityLogger {
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+ [callbackExpectation fulfill];
+ }];
+
+ self.logger = nil;
+
+ RTCLogInfo("Just some info");
+
+ XCTWaiter *waiter = [[XCTWaiter alloc] init];
+ XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+ XCTAssertEqual(result, XCTWaiterResultTimedOut);
+}
+
+- (void)testCallbackWithSeverityLoggerCannotStartTwice {
+ self.logger.severity = RTCLoggingSeverityWarning;
+
+ XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+
+ [self.logger
+ startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+ XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
+ XCTAssertEqual(loggingServerity, RTCLoggingSeverityError);
+ [callbackExpectation fulfill];
+ }];
+
+ [self.logger start:^(NSString *message) {
+ [callbackExpectation fulfill];
+ XCTAssertTrue(false);
+ }];
+
+ RTCLogError("Horrible error");
+
+ [self waitForExpectations:@[ callbackExpectation ] timeout:10.0];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
new file mode 100644
index 0000000000..5018479157
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
@@ -0,0 +1,569 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+
+#import "base/RTCVideoFrame.h"
+#import "components/capturer/RTCCameraVideoCapturer.h"
+#import "helpers/AVCaptureSession+DevicePosition.h"
+#import "helpers/RTCDispatcher.h"
+#import "helpers/scoped_cftyperef.h"
+
+#define WAIT(timeoutMs) \
+ do { \
+ id expectation = [[XCTestExpectation alloc] initWithDescription:@"Dummy"]; \
+ XCTWaiterResult res = [XCTWaiter waitForExpectations:@[ expectation ] \
+ timeout:timeoutMs / 1000.0]; \
+ XCTAssertEqual(XCTWaiterResultTimedOut, res); \
+ } while (false);
+
+#if TARGET_OS_IPHONE
+// Helper method.
+CMSampleBufferRef createTestSampleBufferRef() {
+
+ // This image is already in the testing bundle.
+ UIImage *image = [UIImage imageNamed:@"Default.png"];
+ CGSize size = image.size;
+ CGImageRef imageRef = [image CGImage];
+
+ CVPixelBufferRef pixelBuffer = nullptr;
+ CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
+ &pixelBuffer);
+
+ CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
+ // We don't care about bitsPerComponent and bytesPerRow, so an arbitrary value of 8 is used for both.
+ CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
+ rgbColorSpace, kCGImageAlphaPremultipliedFirst);
+
+ CGContextDrawImage(
+ context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);
+
+ CGColorSpaceRelease(rgbColorSpace);
+ CGContextRelease(context);
+
+ // We don't really care about the timing.
+ CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
+ CMVideoFormatDescriptionRef description = nullptr;
+ CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);
+
+ CMSampleBufferRef sampleBuffer = nullptr;
+ CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
+ &timing, &sampleBuffer);
+ CFRelease(pixelBuffer);
+
+ return sampleBuffer;
+
+}
+#endif
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
+(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate> -
+ (instancetype)initWithDelegate
+ : (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate captureSession
+ : (AVCaptureSession *)captureSession;
+@end
+
+@interface RTCCameraVideoCapturerTests : XCTestCase
+@property(nonatomic, strong) id delegateMock;
+@property(nonatomic, strong) id deviceMock;
+@property(nonatomic, strong) id captureConnectionMock;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@end
+
+@implementation RTCCameraVideoCapturerTests
+@synthesize delegateMock = _delegateMock;
+@synthesize deviceMock = _deviceMock;
+@synthesize captureConnectionMock = _captureConnectionMock;
+@synthesize capturer = _capturer;
+
+- (void)setUp {
+ self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
+ self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
+ self.capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
+ self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
+}
+
+- (void)tearDown {
+ [self.delegateMock stopMocking];
+ [self.deviceMock stopMocking];
+ self.delegateMock = nil;
+ self.deviceMock = nil;
+ self.capturer = nil;
+}
+
+#pragma mark - utils
+
++ (id)createDeviceMock {
+ return OCMClassMock([AVCaptureDevice class]);
+}
+
+#pragma mark - test cases
+
+- (void)testSetupSession {
+ AVCaptureSession *session = self.capturer.captureSession;
+ XCTAssertTrue(session != nil);
+
+#if TARGET_OS_IPHONE
+ XCTAssertEqual(session.sessionPreset, AVCaptureSessionPresetInputPriority);
+ XCTAssertEqual(session.usesApplicationAudioSession, NO);
+#endif
+ XCTAssertEqual(session.outputs.count, 1u);
+}
+
+- (void)testSetupSessionOutput {
+ AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
+ XCTAssertEqual(videoOutput.alwaysDiscardsLateVideoFrames, NO);
+ XCTAssertEqual(videoOutput.sampleBufferDelegate, self.capturer);
+}
+
+- (void)testSupportedFormatsForDevice {
+ // given
+ id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionRef format;
+
+ // We don't care about width and height, so arbitrary values of 123 and 456 are used.
+ int width = 123;
+ int height = 456;
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
+ nil, &format);
+ OCMStub([validFormat1 formatDescription]).andReturn(format);
+
+ id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
+ height, nil, &format);
+ OCMStub([validFormat2 formatDescription]).andReturn(format);
+
+ id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
+ &format);
+ OCMStub([invalidFormat formatDescription]).andReturn(format);
+
+ NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
+ OCMStub([self.deviceMock formats]).andReturn(formats);
+
+ // when
+ NSArray *supportedFormats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];
+
+ // then
+ XCTAssertEqual(supportedFormats.count, 3u);
+ XCTAssertTrue([supportedFormats containsObject:validFormat1]);
+ XCTAssertTrue([supportedFormats containsObject:validFormat2]);
+ XCTAssertTrue([supportedFormats containsObject:invalidFormat]);
+
+ // cleanup
+ [validFormat1 stopMocking];
+ [validFormat2 stopMocking];
+ [invalidFormat stopMocking];
+ validFormat1 = nil;
+ validFormat2 = nil;
+ invalidFormat = nil;
+}
+
+- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
+ // given
+ CMSampleBufferRef sampleBuffer = nullptr;
+ [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];
+
+ // when
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ // then
+ [self.delegateMock verify];
+}
+
+#if 0
+// See crbug.com/1404878 - XCTExpectFailure and XCTSkip are considered failures
+
+- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
+#if TARGET_OS_IPHONE
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"];
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ // then
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270);
+ return YES;
+ }]];
+
+ // when
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+ CFRelease(sampleBuffer);
+#endif
+}
+
+// The XCTest framework treats methods that take no arguments as tests. This is a helper.
+- (void)testRotationCamera:(AVCaptureDevicePosition)camera
+ withOrientation:(UIDeviceOrientation)deviceOrientation {
+#if TARGET_OS_IPHONE
+ // Mock the AVCaptureConnection as we will get the camera position from the connection's
+ // input ports.
+ AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
+ AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
+ NSArray *inputPortsArrayMock = @[captureInputPort];
+ AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
+ OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
+ andReturn(inputPortsArrayMock);
+ OCMStub(captureInputPort.input).andReturn(inputPortMock);
+ OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
+ OCMStub(captureDeviceMock.position).andReturn(camera);
+
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(deviceOrientation) forKey:@"orientation"];
+
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ if (camera == AVCaptureDevicePositionFront) {
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
+ } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ }
+ } else if (camera == AVCaptureDevicePositionBack) {
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
+ }
+ }
+ return YES;
+ }]];
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+
+ CFRelease(sampleBuffer);
+#endif
+}
+
+- (void)testRotationCameraBackLandscapeLeft {
+ [self testRotationCamera:AVCaptureDevicePositionBack
+ withOrientation:UIDeviceOrientationLandscapeLeft];
+}
+
+- (void)testRotationCameraFrontLandscapeLeft {
+ [self testRotationCamera:AVCaptureDevicePositionFront
+ withOrientation:UIDeviceOrientationLandscapeLeft];
+}
+
+- (void)testRotationCameraBackLandscapeRight {
+ [self testRotationCamera:AVCaptureDevicePositionBack
+ withOrientation:UIDeviceOrientationLandscapeRight];
+}
+
+- (void)testRotationCameraFrontLandscapeRight {
+ [self testRotationCamera:AVCaptureDevicePositionFront
+ withOrientation:UIDeviceOrientationLandscapeRight];
+}
+
+#endif
+
+- (void)setExif:(CMSampleBufferRef)sampleBuffer {
+ rtc::ScopedCFTypeRef<CFMutableDictionaryRef> exif(CFDictionaryCreateMutable(
+ kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
+ CFDictionarySetValue(exif.get(), CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
+ CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif.get(), kCMAttachmentMode_ShouldPropagate);
+}
+
+#if 0
+// See crbug.com/1404878 - XCTExpectFailure and XCTSkip are considered failures
+
+- (void)testRotationFrame {
+#if TARGET_OS_IPHONE
+ // Mock the AVCaptureConnection as we will get the camera position from the connection's
+ // input ports.
+ AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
+ AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
+ NSArray *inputPortsArrayMock = @[captureInputPort];
+ AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
+ OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
+ andReturn(inputPortsArrayMock);
+ OCMStub(captureInputPort.input).andReturn(inputPortMock);
+ OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
+ OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);
+
+ XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
+ [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"];
+
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+
+ [[self.delegateMock expect] capturer:self.capturer
+ didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+ expectedFrame) {
+ // Front camera and landscape left should return 180. But the frame's exif
+ // we add below says it's from the back camera, so rotation should be 0.
+ XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
+ return YES;
+ }]];
+
+ NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+ [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
+
+ // We need to wait for the dispatch to finish.
+ WAIT(1000);
+
+ [self setExif:sampleBuffer];
+
+ [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
+ didOutputSampleBuffer:sampleBuffer
+ fromConnection:self.captureConnectionMock];
+
+ [self.delegateMock verify];
+ CFRelease(sampleBuffer);
+#endif
+}
+
+#endif
+
+- (void)testImageExif {
+#if TARGET_OS_IPHONE
+ CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
+ [self setExif:sampleBuffer];
+
+ AVCaptureDevicePosition cameraPosition = [AVCaptureSession
+ devicePositionForSampleBuffer:sampleBuffer];
+ XCTAssertEqual(cameraPosition, AVCaptureDevicePositionBack);
+#endif
+}
+
+@end
+
+@interface RTCCameraVideoCapturerTestsWithMockedCaptureSession : XCTestCase
+@property(nonatomic, strong) id delegateMock;
+@property(nonatomic, strong) id deviceMock;
+@property(nonatomic, strong) id captureSessionMock;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@end
+
+@implementation RTCCameraVideoCapturerTestsWithMockedCaptureSession
+@synthesize delegateMock = _delegateMock;
+@synthesize deviceMock = _deviceMock;
+@synthesize captureSessionMock = _captureSessionMock;
+@synthesize capturer = _capturer;
+
+- (void)setUp {
+ self.captureSessionMock = OCMStrictClassMock([AVCaptureSession class]);
+ OCMStub([self.captureSessionMock setSessionPreset:[OCMArg any]]);
+ OCMStub([self.captureSessionMock setUsesApplicationAudioSession:NO]);
+ OCMStub([self.captureSessionMock canAddOutput:[OCMArg any]]).andReturn(YES);
+ OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
+ OCMStub([self.captureSessionMock beginConfiguration]);
+ OCMStub([self.captureSessionMock commitConfiguration]);
+ self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
+ self.capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
+ captureSession:self.captureSessionMock];
+ self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
+}
+
+- (void)tearDown {
+ [self.delegateMock stopMocking];
+ [self.deviceMock stopMocking];
+ self.delegateMock = nil;
+ self.deviceMock = nil;
+ self.capturer = nil;
+ self.captureSessionMock = nil;
+}
+
+#pragma mark - test cases
+
+- (void)testStartingAndStoppingCapture {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+
+ // Set expectation that the capture session should be started with correct device.
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMExpect([_captureSessionMock startRunning]);
+ OCMExpect([_captureSessionMock stopRunning]);
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
+ [self.capturer stopCapture];
+
+ // Start capture code is dispatched async.
+ OCMVerifyAllWithDelay(_captureSessionMock, 15);
+}
+
+- (void)testStartCaptureFailingToLockForConfiguration {
+ // The captureSessionMock is a strict mock, so this test will crash if the startCapture
+ // method does not return when failing to lock for configuration.
+ OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO);
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
+
+ // Start capture code is dispatched async.
+ OCMVerifyAllWithDelay(self.deviceMock, 15);
+}
+
+- (void)testStartingAndStoppingCaptureWithCallbacks {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+
+ // Set expectation that the capture session should be started with correct device.
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMExpect([_captureSessionMock startRunning]);
+ OCMExpect([_captureSessionMock stopRunning]);
+
+ dispatch_semaphore_t completedStopSemaphore = dispatch_semaphore_create(0);
+
+ __block BOOL completedStart = NO;
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock
+ format:format
+ fps:30
+ completionHandler:^(NSError *error) {
+ XCTAssertEqual(error, nil);
+ completedStart = YES;
+ }];
+
+ __block BOOL completedStop = NO;
+ [self.capturer stopCaptureWithCompletionHandler:^{
+ completedStop = YES;
+ dispatch_semaphore_signal(completedStopSemaphore);
+ }];
+
+ dispatch_semaphore_wait(completedStopSemaphore,
+ dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+ OCMVerifyAllWithDelay(_captureSessionMock, 15);
+ XCTAssertTrue(completedStart);
+ XCTAssertTrue(completedStop);
+}
+
+- (void)testStartCaptureFailingToLockForConfigurationWithCallback {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ id errorMock = OCMClassMock([NSError class]);
+
+ OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([self.deviceMock unlockForConfiguration]);
+
+ OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
+
+ dispatch_semaphore_t completedStartSemaphore = dispatch_semaphore_create(0);
+ __block NSError *callbackError = nil;
+
+ id format = OCMClassMock([AVCaptureDeviceFormat class]);
+ [self.capturer startCaptureWithDevice:self.deviceMock
+ format:format
+ fps:30
+ completionHandler:^(NSError *error) {
+ callbackError = error;
+ dispatch_semaphore_signal(completedStartSemaphore);
+ }];
+
+ long ret = dispatch_semaphore_wait(completedStartSemaphore,
+ dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+ XCTAssertEqual(ret, 0);
+ XCTAssertEqual(callbackError, errorMock);
+}
+
+- (void)testStartCaptureSetsOutputDimensionsInvalidPixelFormat {
+ id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
+ OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock error:[OCMArg setTo:nil]])
+ .andReturn(expectedDeviceInputMock);
+
+ OCMStub([_deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([_deviceMock unlockForConfiguration]);
+ OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
+ OCMStub([_captureSessionMock addInput:expectedDeviceInputMock]);
+ OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
+ OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
+ OCMStub([_captureSessionMock startRunning]);
+ OCMStub([_captureSessionMock stopRunning]);
+
+ id deviceFormatMock = OCMClassMock([AVCaptureDeviceFormat class]);
+ CMVideoFormatDescriptionRef formatDescription;
+
+ int width = 110;
+ int height = 220;
+ FourCharCode pixelFormat = 0x18000000;
+ CMVideoFormatDescriptionCreate(nil, pixelFormat, width, height, nil, &formatDescription);
+ OCMStub([deviceFormatMock formatDescription]).andReturn(formatDescription);
+
+ [_capturer startCaptureWithDevice:_deviceMock format:deviceFormatMock fps:30];
+
+ XCTestExpectation *expectation = [self expectationWithDescription:@"StopCompletion"];
+ [_capturer stopCaptureWithCompletionHandler:^(void) {
+ [expectation fulfill];
+ }];
+
+ [self waitForExpectationsWithTimeout:15 handler:nil];
+
+ OCMVerify([_captureSessionMock
+ addOutput:[OCMArg checkWithBlock:^BOOL(AVCaptureVideoDataOutput *output) {
+ if (@available(iOS 16, *)) {
+ XCTAssertEqual(width, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
+ XCTAssertEqual(height, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
+ } else {
+ XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
+ XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
+ }
+ XCTAssertEqual(
+ (FourCharCode)kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ [output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey] unsignedIntValue]);
+ return YES;
+ }]]);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm
new file mode 100644
index 0000000000..bc1347336c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCCertificateTest.mm
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCCertificateTest : XCTestCase
+@end
+
+@implementation RTCCertificateTest
+
+- (void)testCertificateIsUsedInConfig {
+ RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+ originalConfig.iceServers = @[ server ];
+
+ // Generate a new certificate.
+ RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate)
+ generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
+
+ // Store certificate in configuration.
+ originalConfig.certificate = originalCertificate;
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ // Create PeerConnection with this certificate.
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil];
+
+ // Retrieve certificate from the configuration.
+ RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration;
+
+ // Extract PEM strings from original certificate.
+ std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String];
+ std::string originalCertificateField = [[originalCertificate certificate] UTF8String];
+
+ // Extract PEM strings from certificate retrieved from configuration.
+ RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate;
+ std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String];
+ std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String];
+
+ // Check that the original certificate and retrieved certificate match.
+ EXPECT_EQ(originalPrivateKeyField, retrievedPrivateKeyField);
+ EXPECT_EQ(originalCertificateField, retrievedCertificateField);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm
new file mode 100644
index 0000000000..18cc97191e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCConfigurationTest.mm
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCConfigurationTest : XCTestCase
+@end
+
+@implementation RTCConfigurationTest
+
+- (void)testConversionToNativeConfiguration {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1;
+ const int interval = 2;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:YES
+ srtpEnableEncryptedRtpHeaderExtensions:YES
+ sframeRequireFrameEncryption:YES];
+ config.rtcpAudioReportIntervalMs = 2500;
+ config.rtcpVideoReportIntervalMs = 3750;
+
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig([config createNativeConfiguration]);
+ EXPECT_TRUE(nativeConfig.get());
+ EXPECT_EQ(1u, nativeConfig->servers.size());
+ webrtc::PeerConnectionInterface::IceServer nativeServer =
+ nativeConfig->servers.front();
+ EXPECT_EQ(1u, nativeServer.urls.size());
+ EXPECT_EQ("stun:stun1.example.net", nativeServer.urls.front());
+
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kRelay, nativeConfig->type);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle,
+ nativeConfig->bundle_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate,
+ nativeConfig->rtcp_mux_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled,
+ nativeConfig->tcp_candidate_policy);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost,
+ nativeConfig->candidate_network_policy);
+ EXPECT_EQ(maxPackets, nativeConfig->audio_jitter_buffer_max_packets);
+ EXPECT_EQ(true, nativeConfig->audio_jitter_buffer_fast_accelerate);
+ EXPECT_EQ(timeout, nativeConfig->ice_connection_receiving_timeout);
+ EXPECT_EQ(interval, nativeConfig->ice_backup_candidate_pair_ping_interval);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::GATHER_CONTINUALLY,
+ nativeConfig->continual_gathering_policy);
+ EXPECT_EQ(true, nativeConfig->prune_turn_ports);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_gcm_crypto_suites);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher);
+ EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_encrypted_rtp_header_extensions);
+ EXPECT_EQ(true, nativeConfig->crypto_options->sframe.require_frame_encryption);
+ EXPECT_EQ(2500, nativeConfig->audio_rtcp_report_interval_ms());
+ EXPECT_EQ(3750, nativeConfig->video_rtcp_report_interval_ms());
+}
+
+- (void)testNativeConversionToConfiguration {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1;
+ const int interval = 2;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:NO
+ srtpEnableEncryptedRtpHeaderExtensions:NO
+ sframeRequireFrameEncryption:NO];
+ config.rtcpAudioReportIntervalMs = 1500;
+ config.rtcpVideoReportIntervalMs = 2150;
+
+ webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
+ [config createNativeConfiguration];
+ RTC_OBJC_TYPE(RTCConfiguration) *newConfig =
+ [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig];
+ EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
+ RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+ RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
+ EXPECT_EQ(origServer.urlStrings.count, newServer.urlStrings.count);
+ std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
+ std::string url = newServer.urlStrings.firstObject.UTF8String;
+ EXPECT_EQ(origUrl, url);
+
+ EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
+ EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
+ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
+ EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
+ EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
+ EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
+ EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
+ EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
+ EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
+ newConfig.iceBackupCandidatePairPingInterval);
+ EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
+ EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
+ newConfig.cryptoOptions.srtpEnableGcmCryptoSuites);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher,
+ newConfig.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions,
+ newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions);
+ EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption,
+ newConfig.cryptoOptions.sframeRequireFrameEncryption);
+ EXPECT_EQ(config.rtcpAudioReportIntervalMs, newConfig.rtcpAudioReportIntervalMs);
+ EXPECT_EQ(config.rtcpVideoReportIntervalMs, newConfig.rtcpVideoReportIntervalMs);
+}
+
+- (void)testDefaultValues {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ EXPECT_EQ(config.cryptoOptions, nil);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
new file mode 100644
index 0000000000..ccebd74198
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCDataChannelConfiguration+Private.h"
+#import "api/peerconnection/RTCDataChannelConfiguration.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCDataChannelConfigurationTest : XCTestCase
+@end
+
+@implementation RTCDataChannelConfigurationTest
+
+- (void)testConversionToNativeDataChannelInit {
+ BOOL isOrdered = NO;
+ int maxPacketLifeTime = 5;
+ int maxRetransmits = 4;
+ BOOL isNegotiated = YES;
+ int channelId = 4;
+ NSString *protocol = @"protocol";
+
+ RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+ [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
+ dataChannelConfig.isOrdered = isOrdered;
+ dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime;
+ dataChannelConfig.maxRetransmits = maxRetransmits;
+ dataChannelConfig.isNegotiated = isNegotiated;
+ dataChannelConfig.channelId = channelId;
+ dataChannelConfig.protocol = protocol;
+
+ webrtc::DataChannelInit nativeInit = dataChannelConfig.nativeDataChannelInit;
+ EXPECT_EQ(isOrdered, nativeInit.ordered);
+ EXPECT_EQ(maxPacketLifeTime, nativeInit.maxRetransmitTime);
+ EXPECT_EQ(maxRetransmits, nativeInit.maxRetransmits);
+ EXPECT_EQ(isNegotiated, nativeInit.negotiated);
+ EXPECT_EQ(channelId, nativeInit.id);
+ EXPECT_EQ(protocol.stdString, nativeInit.protocol);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m
new file mode 100644
index 0000000000..02bef9bfb7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/WebRTC.h>
+
+@interface RTCDoNotPutCPlusPlusInFrameworkHeaders : XCTestCase
+@end
+
+@implementation RTCDoNotPutCPlusPlusInFrameworkHeaders
+
+- (void)testNoCPlusPlusInFrameworkHeaders {
+ NSString *fullPath = [NSString stringWithFormat:@"%s", __FILE__];
+ NSString *extension = fullPath.pathExtension;
+
+ XCTAssertEqualObjects(
+ @"m", extension, @"Do not rename %@. It should end with .m.", fullPath.lastPathComponent);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm
new file mode 100644
index 0000000000..84804fee87
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCEncodedImage_xctest.mm
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCEncodedImage+Private.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCEncodedImageTests : XCTestCase
+@end
+
+@implementation RTCEncodedImageTests
+
+- (void)testInitializedWithNativeEncodedImage {
+ const auto encoded_data = webrtc::EncodedImageBuffer::Create();
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(encoded_data);
+
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
+
+ XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data);
+}
+
+- (void)testInitWithNSData {
+ NSData *bufferData = [NSData data];
+ RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
+ encodedImage.buffer = bufferData;
+
+ webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
+ XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
+ XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), bufferData.bytes);
+}
+
+- (void)testRetainsNativeEncodedImage {
+ RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage;
+ {
+ const auto encoded_data = webrtc::EncodedImageBuffer::Create();
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(encoded_data);
+ encodedImage =
+ [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
+ }
+ webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
+ XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
+ XCTAssertTrue(result_encoded_image.GetEncodedData()->data() != nullptr);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
new file mode 100644
index 0000000000..2407c88c1a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/capturer/RTCFileVideoCapturer.h"
+
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
+NSString *const kTestFileName = @"foreman.mp4";
+static const int kTestTimeoutMs = 5 * 1000; // 5secs.
+
+@interface MockCapturerDelegate : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
+
+@property(nonatomic, assign) NSInteger capturedFramesCount;
+
+@end
+
+@implementation MockCapturerDelegate
+@synthesize capturedFramesCount = _capturedFramesCount;
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ self.capturedFramesCount++;
+}
+
+@end
+
+NS_CLASS_AVAILABLE_IOS(10)
+@interface RTCFileVideoCapturerTests : XCTestCase
+
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer;
+@property(nonatomic, strong) MockCapturerDelegate *mockDelegate;
+
+@end
+
+@implementation RTCFileVideoCapturerTests
+@synthesize capturer = _capturer;
+@synthesize mockDelegate = _mockDelegate;
+
+- (void)setUp {
+ self.mockDelegate = [[MockCapturerDelegate alloc] init];
+ self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate];
+}
+
+- (void)tearDown {
+ self.capturer = nil;
+ self.mockDelegate = nil;
+}
+
+- (void)testCaptureWhenFileNotInBundle {
+ __block BOOL errorOccured = NO;
+
+ RTCFileVideoCapturerErrorBlock errorBlock = ^void(NSError *error) {
+ errorOccured = YES;
+ };
+
+ [self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov" onError:errorBlock];
+ ASSERT_TRUE_WAIT(errorOccured, kTestTimeoutMs);
+}
+
+- (void)testSecondStartCaptureCallFails {
+ __block BOOL secondError = NO;
+
+ RTCFileVideoCapturerErrorBlock firstErrorBlock = ^void(NSError *error) {
+ // This block should never be called.
+ NSLog(@"Error: %@", [error userInfo]);
+ ASSERT_TRUE(false);
+ };
+
+ RTCFileVideoCapturerErrorBlock secondErrorBlock = ^void(NSError *error) {
+ secondError = YES;
+ };
+
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:firstErrorBlock];
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:secondErrorBlock];
+
+ ASSERT_TRUE_WAIT(secondError, kTestTimeoutMs);
+}
+
+- (void)testStartStopCapturer {
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 10, *)) {
+ [self.capturer startCapturingFromFileNamed:kTestFileName onError:nil];
+
+ __block BOOL done = NO;
+ __block NSInteger capturedFrames = -1;
+ NSInteger capturedFramesAfterStop = -1;
+
+ // We're dispatching the `stopCapture` with delay to ensure the capturer has
+ // had the chance to capture several frames.
+ dispatch_time_t captureDelay = dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2secs.
+ dispatch_after(captureDelay, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+ capturedFrames = self.mockDelegate.capturedFramesCount;
+ [self.capturer stopCapture];
+ done = YES;
+ });
+ WAIT(done, kTestTimeoutMs);
+
+ capturedFramesAfterStop = self.mockDelegate.capturedFramesCount;
+ ASSERT_TRUE(capturedFrames != -1);
+ ASSERT_EQ(capturedFrames, capturedFramesAfterStop);
+ }
+#endif
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
new file mode 100644
index 0000000000..ec9dc41796
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "components/video_codec/RTCH264ProfileLevelId.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCH264ProfileLevelIdTests : XCTestCase
+
+@end
+
+static NSString *level31ConstrainedHigh = @"640c1f";
+static NSString *level31ConstrainedBaseline = @"42e01f";
+
+@implementation RTCH264ProfileLevelIdTests
+
+- (void)testInitWithString {
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
+ XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
+ XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
+
+ profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
+ XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
+ XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
+}
+
+- (void)testInitWithProfileAndLevel {
+ RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+ [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
+ level:RTCH264Level3_1];
+ XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);
+
+ profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+ initWithProfile:RTCH264ProfileConstrainedBaseline
+ level:RTCH264Level3_1];
+ XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm
new file mode 100644
index 0000000000..576411985d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCIceCandidateTest.mm
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCIceCandidate+Private.h"
+#import "api/peerconnection/RTCIceCandidate.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCIceCandidateTest : XCTestCase
+@end
+
+@implementation RTCIceCandidateTest
+
+- (void)testCandidate {
+ NSString *sdp = @"candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0";
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"];
+
+ std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
+ candidate.nativeCandidate;
+ EXPECT_EQ("audio", nativeCandidate->sdp_mid());
+ EXPECT_EQ(0, nativeCandidate->sdp_mline_index());
+
+ std::string sdpString;
+ nativeCandidate->ToString(&sdpString);
+ EXPECT_EQ(sdp.stdString, sdpString);
+}
+
+- (void)testInitFromNativeCandidate {
+ std::string sdp("candidate:4025901590 1 udp 2122265343 "
+ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
+ "59052 typ host generation 0");
+ webrtc::IceCandidateInterface *nativeCandidate =
+ webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
+
+ RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+ [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate];
+ EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
+ EXPECT_EQ(0, iceCandidate.sdpMLineIndex);
+
+ EXPECT_EQ(sdp, iceCandidate.sdp.stdString);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm
new file mode 100644
index 0000000000..772653c4dc
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCIceServerTest.mm
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCIceServer+Private.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCIceServerTest : XCTestCase
+@end
+
+@implementation RTCIceServerTest
+
+- (void)testOneURLServer {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("stun:stun1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testTwoURLServer {
+ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
+ initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
+
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(2u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("turn2:turn2.example.net", iceStruct.urls.back());
+ EXPECT_EQ("", iceStruct.username);
+ EXPECT_EQ("", iceStruct.password);
+}
+
+- (void)testPasswordCredential {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+}
+
+- (void)testHostname {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+}
+
+- (void)testTlsAlpnProtocols {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"
+ tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+ EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
+}
+
+- (void)testTlsEllipticCurves {
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+ username:@"username"
+ credential:@"credential"
+ tlsCertPolicy:RTCTlsCertPolicySecure
+ hostname:@"hostname"
+ tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
+ tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
+ webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
+ EXPECT_EQ(1u, iceStruct.urls.size());
+ EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
+ EXPECT_EQ("username", iceStruct.username);
+ EXPECT_EQ("credential", iceStruct.password);
+ EXPECT_EQ("hostname", iceStruct.hostname);
+ EXPECT_EQ(2u, iceStruct.tls_alpn_protocols.size());
+ EXPECT_EQ(2u, iceStruct.tls_elliptic_curves.size());
+}
+
+- (void)testInitFromNativeServer {
+ webrtc::PeerConnectionInterface::IceServer nativeServer;
+ nativeServer.username = "username";
+ nativeServer.password = "password";
+ nativeServer.urls.push_back("stun:stun.example.net");
+ nativeServer.hostname = "hostname";
+ nativeServer.tls_alpn_protocols.push_back("proto1");
+ nativeServer.tls_alpn_protocols.push_back("proto2");
+ nativeServer.tls_elliptic_curves.push_back("curve1");
+ nativeServer.tls_elliptic_curves.push_back("curve2");
+
+ RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer];
+ EXPECT_EQ(1u, iceServer.urlStrings.count);
+ EXPECT_EQ("stun:stun.example.net",
+ [NSString stdStringForString:iceServer.urlStrings.firstObject]);
+ EXPECT_EQ("username", [NSString stdStringForString:iceServer.username]);
+ EXPECT_EQ("password", [NSString stdStringForString:iceServer.credential]);
+ EXPECT_EQ("hostname", [NSString stdStringForString:iceServer.hostname]);
+ EXPECT_EQ(2u, iceServer.tlsAlpnProtocols.count);
+ EXPECT_EQ(2u, iceServer.tlsEllipticCurves.count);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m
new file mode 100644
index 0000000000..f152eeec91
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCMTLVideoView_xctest.m
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#import <Foundation/Foundation.h>
+#import <MetalKit/MetalKit.h>
+#import <OCMock/OCMock.h>
+
+#import "components/renderer/metal/RTCMTLVideoView.h"
+
+#import "api/video_frame_buffer/RTCNativeI420Buffer.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/renderer/metal/RTCMTLNV12Renderer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+static size_t kBufferWidth = 200;
+static size_t kBufferHeight = 200;
+
+// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes.
+@interface RTC_OBJC_TYPE (RTCMTLVideoView)
+(Testing)
+
+ @property(nonatomic, readonly) MTKView *metalView;
+
++ (BOOL)isMetalAvailable;
++ (UIView *)createMetalView:(CGRect)frame;
++ (id<RTCMTLRenderer>)createNV12Renderer;
++ (id<RTCMTLRenderer>)createI420Renderer;
+- (void)drawInMTKView:(id)view;
+@end
+
+@interface RTCMTLVideoViewTests : XCTestCase
+@property(nonatomic, strong) id classMock;
+@property(nonatomic, strong) id rendererNV12Mock;
+@property(nonatomic, strong) id rendererI420Mock;
+@property(nonatomic, strong) id frameMock;
+@end
+
+@implementation RTCMTLVideoViewTests
+
+@synthesize classMock = _classMock;
+@synthesize rendererNV12Mock = _rendererNV12Mock;
+@synthesize rendererI420Mock = _rendererI420Mock;
+@synthesize frameMock = _frameMock;
+
+- (void)setUp {
+ self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]);
+ [self startMockingNilView];
+}
+
+- (void)tearDown {
+ [self.classMock stopMocking];
+ [self.rendererI420Mock stopMocking];
+ [self.rendererNV12Mock stopMocking];
+ [self.frameMock stopMocking];
+ self.classMock = nil;
+ self.rendererI420Mock = nil;
+ self.rendererNV12Mock = nil;
+ self.frameMock = nil;
+}
+
+- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
+ id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
+ if (hasCVPixelBuffer) {
+ CVPixelBufferRef pixelBufferRef;
+ CVPixelBufferCreate(kCFAllocatorDefault,
+ kBufferWidth,
+ kBufferHeight,
+ kCVPixelFormatType_420YpCbCr8Planar,
+ nil,
+ &pixelBufferRef);
+ OCMStub([frameMock buffer])
+ .andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
+ } else {
+ OCMStub([frameMock buffer])
+ .andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:kBufferWidth
+ height:kBufferHeight]);
+ }
+ OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width]).andReturn(kBufferWidth);
+ OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height]).andReturn(kBufferHeight);
+ OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
+ return frameMock;
+}
+
+- (id)rendererMockWithSuccessfulSetup:(BOOL)success {
+ id rendererMock = OCMClassMock([RTCMTLRenderer class]);
+ OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success);
+ return rendererMock;
+}
+
+- (void)startMockingNilView {
+ // Use OCMock 2 syntax here until OCMock is upgraded to 3.4
+ [[[self.classMock stub] andReturn:nil] createMetalView:CGRectZero];
+}
+
+#pragma mark - Test cases
+
+- (void)testInitAssertsIfMetalUnavailabe {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(NO);
+
+ // when
+ BOOL asserts = NO;
+ @try {
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+ (void)realView;
+ } @catch (NSException *ex) {
+ asserts = YES;
+ }
+
+ XCTAssertTrue(asserts);
+}
+
+- (void)testRTCVideoRenderNilFrameCallback {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
+
+ [[self.frameMock reject] buffer];
+ [[self.classMock reject] createNV12Renderer];
+ [[self.classMock reject] createI420Renderer];
+
+ // when
+ [realView renderFrame:nil];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.frameMock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderFrameCallbackI420 {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererI420Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:NO];
+
+ OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
+ [[self.classMock reject] createNV12Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ // when
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.rendererI420Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderFrameCallbackNV12 {
+ // given
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ // when
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ // then
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testRTCVideoRenderWorksAfterReconstruction {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ // Recreate view.
+ realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+  // View should reinit renderer.
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+}
+
+- (void)testDontRedrawOldFrame {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ [[self.rendererNV12Mock reject] drawFrame:[OCMArg any]];
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+}
+
+- (void)testDoDrawNewFrame {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+ [[self.classMock reject] createI420Renderer];
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+ [self.classMock verify];
+
+ // Get new frame.
+ self.frameMock = [self frameMockWithCVPixelBuffer:YES];
+ OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
+
+ [realView renderFrame:self.frameMock];
+ [realView drawInMTKView:realView.metalView];
+
+ [self.rendererNV12Mock verify];
+}
+
+- (void)testReportsSizeChangesToDelegate {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+
+ id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
+ CGSize size = CGSizeMake(640, 480);
+ OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+ [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+ realView.delegate = delegateMock;
+ [realView setSize:size];
+
+ // Delegate method is invoked with a dispatch_async.
+ OCMVerifyAllWithDelay(delegateMock, 1);
+}
+
+- (void)testSetContentMode {
+ OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
+ id metalKitView = OCMClassMock([MTKView class]);
+ [[[[self.classMock stub] ignoringNonObjectArgs] andReturn:metalKitView]
+ createMetalView:CGRectZero];
+ OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);
+
+ RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
+ [realView setVideoContentMode:UIViewContentModeScaleAspectFill];
+
+ OCMVerify(metalKitView);
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm
new file mode 100644
index 0000000000..6ed7859ba1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCMediaConstraintsTest.mm
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCMediaConstraints+Private.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCMediaConstraintsTests : XCTestCase
+@end
+
+@implementation RTCMediaConstraintsTests
+
+- (void)testMediaConstraints {
+ NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
+ NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory
+ optionalConstraints:optional];
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
+ [constraints nativeConstraints];
+
+ webrtc::MediaConstraints::Constraints nativeMandatory = nativeConstraints->GetMandatory();
+ [self expectConstraints:mandatory inNativeConstraints:nativeMandatory];
+
+ webrtc::MediaConstraints::Constraints nativeOptional = nativeConstraints->GetOptional();
+ [self expectConstraints:optional inNativeConstraints:nativeOptional];
+}
+
+- (void)expectConstraints:(NSDictionary *)constraints
+ inNativeConstraints:(webrtc::MediaConstraints::Constraints)nativeConstraints {
+ EXPECT_EQ(constraints.count, nativeConstraints.size());
+
+ for (NSString *key in constraints) {
+ NSString *value = [constraints objectForKey:key];
+
+ std::string nativeValue;
+ bool found = nativeConstraints.FindFirst(key.stdString, &nativeValue);
+ EXPECT_TRUE(found);
+ EXPECT_EQ(value.stdString, nativeValue);
+ }
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
new file mode 100644
index 0000000000..7bdc538f67
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <CoreVideo/CoreVideo.h>
+#import <Foundation/Foundation.h>
+#import <GLKit/GLKit.h>
+#import <XCTest/XCTest.h>
+
+#import "base/RTCVideoFrame.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/renderer/opengl/RTCNV12TextureCache.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+@interface RTCNV12TextureCacheTests : XCTestCase
+@end
+
+@implementation RTCNV12TextureCacheTests {
+ EAGLContext *_glContext;
+ RTCNV12TextureCache *_nv12TextureCache;
+}
+
+- (void)setUp {
+ [super setUp];
+ _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
+ if (!_glContext) {
+ _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
+}
+
+- (void)tearDown {
+ _nv12TextureCache = nil;
+ _glContext = nil;
+ [super tearDown];
+}
+
+- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
+ CVPixelBufferRef nullPixelBuffer = NULL;
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
+ [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
+ RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
+ rotation:RTCVideoRotation_0
+ timeStampNs:0];
+ [_nv12TextureCache uploadFrameToTextures:badFrame];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
new file mode 100644
index 0000000000..5ba5a52a53
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#import <OCMock/OCMock.h>
+#ifdef __cplusplus
+}
+#endif
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
+#import "api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
+#import "api/peerconnection/RTCPeerConnectionFactoryBuilder.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+#include "rtc_base/gunit.h"
+#include "rtc_base/system/unused.h"
+
+@interface RTCPeerConnectionFactoryBuilderTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionFactoryBuilderTests
+
+- (void)testBuilder {
+ id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
+ OCMExpect([factoryMock alloc]).andReturn(factoryMock);
+ RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
+ initWithNativeAudioEncoderFactory:nullptr
+ nativeAudioDecoderFactory:nullptr
+ nativeVideoEncoderFactory:nullptr
+ nativeVideoDecoderFactory:nullptr
+ audioDeviceModule:nullptr
+ audioProcessingModule:nullptr]);
+ RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+ [builder createPeerConnectionFactory];
+ EXPECT_TRUE(peerConnectionFactory != nil);
+ OCMVerifyAll(factoryMock);
+}
+
+- (void)testDefaultComponentsBuilder {
+ id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
+ OCMExpect([factoryMock alloc]).andReturn(factoryMock);
+ RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
+ initWithNativeAudioEncoderFactory:nullptr
+ nativeAudioDecoderFactory:nullptr
+ nativeVideoEncoderFactory:nullptr
+ nativeVideoDecoderFactory:nullptr
+ audioDeviceModule:nullptr
+ audioProcessingModule:nullptr]);
+ RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+ [builder createPeerConnectionFactory];
+ EXPECT_TRUE(peerConnectionFactory != nil);
+ OCMVerifyAll(factoryMock);
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
new file mode 100644
index 0000000000..0c5a96cd2f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
@@ -0,0 +1,381 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "api/peerconnection/RTCAudioSource.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCDataChannel.h"
+#import "api/peerconnection/RTCDataChannelConfiguration.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCMediaStreamTrack.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "api/peerconnection/RTCRtpReceiver.h"
+#import "api/peerconnection/RTCRtpSender.h"
+#import "api/peerconnection/RTCRtpTransceiver.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "api/peerconnection/RTCVideoSource.h"
+#import "rtc_base/system/unused.h"
+
+#import <XCTest/XCTest.h>
+
+@interface RTCPeerConnectionFactoryTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionFactoryTests
+
+- (void)testPeerConnectionLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [peerConnection close];
+ factory = nil;
+ }
+ peerConnection = nil;
+ }
+
+ XCTAssertTrue(true, @"Expect test does not crash");
+}
+
+- (void)testMediaStreamLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCMediaStream) * mediaStream;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"];
+ factory = nil;
+ }
+ mediaStream = nil;
+ RTC_UNUSED(mediaStream);
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testDataChannelLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+ [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCDataChannel) * dataChannel;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ dataChannel =
+ [peerConnection dataChannelForLabel:@"test_channel" configuration:dataChannelConfig];
+ XCTAssertNotNil(dataChannel);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+ dataChannel = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testRTCRtpTransceiverLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
+ [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
+ tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
+ XCTAssertNotNil(tranceiver);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+ tranceiver = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+// TODO(crbug.com/webrtc/13989): Remove call to CreateSender in senderWithKind.
+#if !TARGET_IPHONE_SIMULATOR
+- (void)testRTCRtpSenderLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+ RTC_OBJC_TYPE(RTCRtpSender) * sender;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
+ XCTAssertNotNil(sender);
+ [peerConnection close];
+ peerConnection = nil;
+ factory = nil;
+ }
+ sender = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testRTCRtpReceiverLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
+ RTC_OBJC_TYPE(RTCPeerConnection) * pc2;
+
+ NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers1;
+ NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers2;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
+
+ pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ [pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
+
+ NSTimeInterval negotiationTimeout = 15;
+ XCTAssertTrue([self negotiatePeerConnection:pc1
+ withPeerConnection:pc2
+ negotiationTimeout:negotiationTimeout]);
+
+ XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);
+ XCTAssertEqual(pc2.signalingState, RTCSignalingStateStable);
+
+ receivers1 = pc1.receivers;
+ receivers2 = pc2.receivers;
+ XCTAssertTrue(receivers1.count > 0);
+ XCTAssertTrue(receivers2.count > 0);
+ [pc1 close];
+ [pc2 close];
+ pc1 = nil;
+ pc2 = nil;
+ factory = nil;
+ }
+ receivers1 = nil;
+ receivers2 = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+#endif
+
+- (void)testAudioSourceLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCAudioSource) * audioSource;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ audioSource = [factory audioSourceWithConstraints:nil];
+ XCTAssertNotNil(audioSource);
+ factory = nil;
+ }
+ audioSource = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testVideoSourceLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCVideoSource) * videoSource;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ videoSource = [factory videoSource];
+ XCTAssertNotNil(videoSource);
+ factory = nil;
+ }
+ videoSource = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testAudioTrackLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ audioTrack = [factory audioTrackWithTrackId:@"audioTrack"];
+ XCTAssertNotNil(audioTrack);
+ factory = nil;
+ }
+ audioTrack = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testVideoTrackLifetime {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack;
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
+ XCTAssertNotNil(videoTrack);
+ factory = nil;
+ }
+ videoTrack = nil;
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+}
+
+- (void)testRollback {
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
+ }
+ optionalConstraints:nil];
+
+ __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+ __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
+ RTCSessionDescription *rollback = [[RTCSessionDescription alloc] initWithType:RTCSdpTypeRollback
+ sdp:@""];
+
+ @autoreleasepool {
+ factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+ pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [pc1 offerForConstraints:constraints
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+ XCTAssertNil(error);
+ XCTAssertNotNil(offer);
+
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
+ [pc1 setLocalDescription:offer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC1 setLocalDescription:rollback
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ }];
+ }];
+ NSTimeInterval negotiationTimeout = 15;
+ dispatch_semaphore_wait(
+ negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(negotiationTimeout * NSEC_PER_SEC)));
+
+ XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);
+
+ [pc1 close];
+ pc1 = nil;
+ factory = nil;
+ }];
+ }
+
+ XCTAssertTrue(true, "Expect test does not crash");
+ }
+}
+
+- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1
+ withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2
+ negotiationTimeout:(NSTimeInterval)timeout {
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
+ __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
+ }
+ optionalConstraints:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [weakPC1 offerForConstraints:sdpConstraints
+ completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+ XCTAssertNil(error);
+ XCTAssertNotNil(offer);
+ [weakPC1
+ setLocalDescription:offer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC2
+ setRemoteDescription:offer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC2
+ answerForConstraints:sdpConstraints
+ completionHandler:^(
+ RTC_OBJC_TYPE(RTCSessionDescription) * answer,
+ NSError * error) {
+ XCTAssertNil(error);
+ XCTAssertNotNil(answer);
+ [weakPC2
+ setLocalDescription:answer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ [weakPC1
+ setRemoteDescription:answer
+ completionHandler:^(NSError *error) {
+ XCTAssertNil(error);
+ dispatch_semaphore_signal(negotiatedSem);
+ }];
+ }];
+ }];
+ }];
+ }];
+ }];
+
+ return 0 ==
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)));
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm
new file mode 100644
index 0000000000..9ca8403559
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionTest.mm
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <memory>
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCConfiguration+Private.h"
+#import "api/peerconnection/RTCConfiguration.h"
+#import "api/peerconnection/RTCCryptoOptions.h"
+#import "api/peerconnection/RTCIceCandidate.h"
+#import "api/peerconnection/RTCIceServer.h"
+#import "api/peerconnection/RTCMediaConstraints.h"
+#import "api/peerconnection/RTCPeerConnection.h"
+#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"
+#import "api/peerconnection/RTCPeerConnectionFactory.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCPeerConnectionTests : XCTestCase
+@end
+
+@implementation RTCPeerConnectionTests
+
+- (void)testConfigurationGetter {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.iceServers = @[ server ];
+ config.iceTransportPolicy = RTCIceTransportPolicyRelay;
+ config.bundlePolicy = RTCBundlePolicyMaxBundle;
+ config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate;
+ config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled;
+ config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost;
+ const int maxPackets = 60;
+ const int timeout = 1500;
+ const int interval = 2000;
+ config.audioJitterBufferMaxPackets = maxPackets;
+ config.audioJitterBufferFastAccelerate = YES;
+ config.iceConnectionReceivingTimeout = timeout;
+ config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
+ config.shouldPruneTurnPorts = YES;
+ config.activeResetSrtpParams = YES;
+ config.cryptoOptions =
+ [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+ srtpEnableAes128Sha1_32CryptoCipher:YES
+ srtpEnableEncryptedRtpHeaderExtensions:NO
+ sframeRequireFrameEncryption:NO];
+
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
+ newConfig = peerConnection.configuration;
+
+ EXPECT_TRUE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:100000]
+ currentBitrateBps:[NSNumber numberWithInt:5000000]
+ maxBitrateBps:[NSNumber numberWithInt:500000000]]);
+ EXPECT_FALSE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:2]
+ currentBitrateBps:[NSNumber numberWithInt:1]
+ maxBitrateBps:nil]);
+ }
+
+ EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]);
+ RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+ RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
+ std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
+ std::string url = newServer.urlStrings.firstObject.UTF8String;
+ EXPECT_EQ(origUrl, url);
+
+ EXPECT_EQ(config.iceTransportPolicy, newConfig.iceTransportPolicy);
+ EXPECT_EQ(config.bundlePolicy, newConfig.bundlePolicy);
+ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
+ EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
+ EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
+ EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
+ EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
+ EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
+ EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
+ newConfig.iceBackupCandidatePairPingInterval);
+ EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
+ EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
+ EXPECT_EQ(config.activeResetSrtpParams, newConfig.activeResetSrtpParams);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
+ newConfig.cryptoOptions.srtpEnableGcmCryptoSuites);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher,
+ newConfig.cryptoOptions.srtpEnableAes128Sha1_32CryptoCipher);
+ EXPECT_EQ(config.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions,
+ newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions);
+ EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption,
+ newConfig.cryptoOptions.sframeRequireFrameEncryption);
+}
+
+- (void)testWithDependencies {
+ NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
+ RTC_OBJC_TYPE(RTCIceServer) *server =
+ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ config.iceServers = @[ server ];
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ std::unique_ptr<webrtc::PeerConnectionDependencies> pc_dependencies =
+ std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
+ @autoreleasepool {
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithDependencies:config
+ constraints:contraints
+ dependencies:std::move(pc_dependencies)
+ delegate:nil];
+ ASSERT_NE(peerConnection, nil);
+ }
+}
+
+- (void)testWithInvalidSDP {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
+ initWithType:RTCSdpTypeOffer
+ sdp:@"invalid"]
+ completionHandler:^(NSError *error) {
+ ASSERT_NE(error, nil);
+ if (error != nil) {
+ dispatch_semaphore_signal(negotiatedSem);
+ }
+ }];
+
+ NSTimeInterval timeout = 5;
+ ASSERT_EQ(
+ 0,
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
+ [peerConnection close];
+}
+
+- (void)testWithInvalidIceCandidate {
+ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+ [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
+
+ RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+ config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
+ RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+ [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+ optionalConstraints:nil];
+ RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
+ [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
+
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
+ [peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:@"invalid"
+ sdpMLineIndex:-1
+ sdpMid:nil]
+ completionHandler:^(NSError *error) {
+ ASSERT_NE(error, nil);
+ if (error != nil) {
+ dispatch_semaphore_signal(negotiatedSem);
+ }
+ }];
+
+ NSTimeInterval timeout = 5;
+ ASSERT_EQ(
+ 0,
+ dispatch_semaphore_wait(negotiatedSem,
+ dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
+ [peerConnection close];
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm
new file mode 100644
index 0000000000..70c82f78ce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCSessionDescriptionTest.mm
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCSessionDescription+Private.h"
+#import "api/peerconnection/RTCSessionDescription.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCSessionDescriptionTests : XCTestCase
+@end
+
+@implementation RTCSessionDescriptionTests
+
+/**
+ * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native
+ * SessionDescriptionInterface (based on the types and SDP strings being equal).
+ */
+- (void)testSessionDescriptionConversion {
+ RTC_OBJC_TYPE(RTCSessionDescription) *description =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]];
+
+ std::unique_ptr<webrtc::SessionDescriptionInterface> nativeDescription =
+ description.nativeDescription;
+
+ EXPECT_EQ(RTCSdpTypeAnswer,
+ [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]);
+
+ std::string sdp;
+ nativeDescription->ToString(&sdp);
+ EXPECT_EQ([self sdp].stdString, sdp);
+}
+
+- (void)testInitFromNativeSessionDescription {
+ webrtc::SessionDescriptionInterface *nativeDescription;
+
+ nativeDescription = webrtc::CreateSessionDescription(
+ webrtc::SessionDescriptionInterface::kAnswer,
+ [self sdp].stdString,
+ nullptr);
+
+ RTC_OBJC_TYPE(RTCSessionDescription) *description =
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription];
+ EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
+ [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
+ EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
+}
+
+- (NSString *)sdp {
+ return @"v=0\r\n"
+ "o=- 5319989746393411314 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE audio video\r\n"
+ "a=msid-semantic: WMS ARDAMS\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111 103 9 0 8 126\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:audio\r\n"
+ "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=fmtp:111 minptime=10;useinbandfec=1\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:9 G722/8000\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=maxptime:60\r\n"
+ "a=ssrc:1504474588 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1504474588 msid:ARDAMS ARDAMSa0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 100 116 117 96\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:f3o+0HG7l9nwIWFY\r\n"
+ "a=ice-pwd:VDctmJNCptR2TB7+meDpw7w5\r\n"
+ "a=fingerprint:sha-256 A9:D5:8D:A8:69:22:39:60:92:AD:94:1A:22:2D:5E:"
+ "A5:4A:A9:18:C2:35:5D:46:5E:59:BD:1C:AF:38:9F:E6:E1\r\n"
+ "a=setup:active\r\n"
+ "a=mid:video\r\n"
+ "a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n"
+ "a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/"
+ "abs-send-time\r\n"
+ "a=extmap:4 urn:3gpp:video-orientation\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:100 VP8/90000\r\n"
+ "a=rtcp-fb:100 ccm fir\r\n"
+ "a=rtcp-fb:100 goog-lntf\r\n"
+ "a=rtcp-fb:100 nack\r\n"
+ "a=rtcp-fb:100 nack pli\r\n"
+ "a=rtcp-fb:100 goog-remb\r\n"
+ "a=rtpmap:116 red/90000\r\n"
+ "a=rtpmap:117 ulpfec/90000\r\n"
+ "a=rtpmap:96 rtx/90000\r\n"
+ "a=fmtp:96 apt=100\r\n"
+ "a=ssrc-group:FID 498297514 1644357692\r\n"
+ "a=ssrc:498297514 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:498297514 msid:ARDAMS ARDAMSv0\r\n"
+ "a=ssrc:1644357692 cname:V+FdIC5AJpxLhdYQ\r\n"
+ "a=ssrc:1644357692 msid:ARDAMS ARDAMSv0\r\n";
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm b/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm
new file mode 100644
index 0000000000..ff93047bdf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/RTCTracingTest.mm
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+#include <vector>
+
+#include "rtc_base/gunit.h"
+
+#import "api/peerconnection/RTCTracing.h"
+#import "helpers/NSString+StdString.h"
+
+@interface RTCTracingTests : XCTestCase
+@end
+
+@implementation RTCTracingTests
+
+- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
+ NSParameterAssert(fileName.length);
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString *documentsDirPath = paths.firstObject;
+ NSString *filePath =
+ [documentsDirPath stringByAppendingPathComponent:fileName];
+ return filePath;
+}
+
+- (void)testTracingTestNoInitialization {
+ NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
+ EXPECT_EQ(NO, RTCStartInternalCapture(filePath));
+ RTCStopInternalCapture();
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm
new file mode 100644
index 0000000000..15a0f1811c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short16.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm
new file mode 100644
index 0000000000..011cdce959
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short44.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm b/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm
new file mode 100644
index 0000000000..06fd8261cd
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/audio_short48.pcm
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm b/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm
new file mode 100644
index 0000000000..35e95a8c22
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/avformatmappertests.mm
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+
+#include "rtc_base/gunit.h"
+
+// Width and height don't play any role so let's use predefined values throughout
+// the tests.
+static const int kFormatWidth = 789;
+static const int kFormatHeight = 987;
+
+// Hardcoded framerate to be used throughout the tests.
+static const int kFramerate = 30;
+
+// The same width and height are used, so it's ok to expect the same cricket::VideoFormat
+static cricket::VideoFormat expectedFormat =
+ cricket::VideoFormat(kFormatWidth,
+ kFormatHeight,
+ cricket::VideoFormat::FpsToInterval(kFramerate),
+ cricket::FOURCC_NV12);
+
+// Mock class for AVCaptureDeviceFormat.
+// Custom implementation needed because OCMock cannot handle the
+// CMVideoDescriptionRef mocking.
+@interface AVCaptureDeviceFormatMock : NSObject
+
+@property (nonatomic, assign) CMVideoFormatDescriptionRef format;
+@property (nonatomic, strong) OCMockObject *rangeMock;
+
+- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
+ minFps:(float)minFps
+ maxFps:(float)maxFps;
++ (instancetype)validFormat;
++ (instancetype)invalidFpsFormat;
++ (instancetype)invalidMediaSubtypeFormat;
+
+@end
+
+@implementation AVCaptureDeviceFormatMock
+
+@synthesize format = _format;
+@synthesize rangeMock = _rangeMock;
+
+- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
+ minFps:(float)minFps
+ maxFps:(float)maxFps {
+ if (self = [super init]) {
+ CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight,
+ nil, &_format);
+ // We can use OCMock for the range.
+ _rangeMock = [OCMockObject mockForClass:[AVFrameRateRange class]];
+ [[[_rangeMock stub] andReturnValue:@(minFps)] minFrameRate];
+ [[[_rangeMock stub] andReturnValue:@(maxFps)] maxFrameRate];
+ }
+
+ return self;
+}
+
++ (instancetype)validFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:30.0];
+ return instance;
+}
+
++ (instancetype)invalidFpsFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:22.0];
+ return instance;
+}
+
++ (instancetype)invalidMediaSubtypeFormat {
+ AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8Planar
+ minFps:0.0
+ maxFps:60.0];
+ return instance;
+}
+
+- (void)dealloc {
+ if (_format != nil) {
+ CFRelease(_format);
+ _format = nil;
+ }
+}
+
+// Redefinition of AVCaptureDevice methods we want to mock.
+- (CMVideoFormatDescriptionRef)formatDescription {
+ return self.format;
+}
+
+- (NSArray *)videoSupportedFrameRateRanges {
+ return @[ self.rangeMock ];
+}
+
+@end
+
+TEST(AVFormatMapperTest, SuportedCricketFormatsWithInvalidFramerateFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // Valid media subtype, invalid framerate
+ AVCaptureDeviceFormatMock* mock =
+ [AVCaptureDeviceFormatMock invalidFpsFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_TRUE(result.empty());
+}
+
+TEST(AVFormatMapperTest, SuportedCricketFormatsWithInvalidFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // Invalid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mock =
+ [AVCaptureDeviceFormatMock invalidMediaSubtypeFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_TRUE(result.empty());
+}
+
+TEST(AVFormatMapperTest, SuportedCricketFormats) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // valid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mock = [AVCaptureDeviceFormatMock validFormat];
+ OCMStub([mockDevice formats]).andReturn(@[ mock ]);
+
+ // when
+ std::set<cricket::VideoFormat> result =
+ webrtc::GetSupportedVideoFormatsForDevice(mockDevice);
+
+ // then
+ EXPECT_EQ(1u, result.size());
+ // make sure the set has the expected format
+ EXPECT_EQ(expectedFormat, *result.begin());
+}
+
+TEST(AVFormatMapperTest, MediaSubtypePreference) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+
+ // valid media subtype, valid framerate
+ AVCaptureDeviceFormatMock* mockOne = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+ minFps:0.0
+ maxFps:30.0];
+ // valid media subtype, valid framerate.
+  // This media subtype should be the preferred one.
+ AVCaptureDeviceFormatMock* mockTwo = [[AVCaptureDeviceFormatMock alloc]
+ initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+ minFps:0.0
+ maxFps:30.0];
+ OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ OCMStub([mockDevice unlockForConfiguration]);
+ NSArray* array = @[ mockOne, mockTwo ];
+ OCMStub([mockDevice formats]).andReturn(array);
+
+ // to verify
+ OCMExpect([mockDevice setActiveFormat:(AVCaptureDeviceFormat*)mockTwo]);
+ OCMExpect(
+ [mockDevice setActiveVideoMinFrameDuration:CMTimeMake(1, kFramerate)]);
+
+ // when
+ bool resultFormat =
+ webrtc::SetFormatForCaptureDevice(mockDevice, nil, expectedFormat);
+
+ // then
+ EXPECT_TRUE(resultFormat);
+ [mockDevice verify];
+}
+
+TEST(AVFormatMapperTest, SetFormatWhenDeviceCannotLock) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ [[[mockDevice stub] andReturnValue:@(NO)]
+ lockForConfiguration:[OCMArg setTo:nil]];
+ [[[mockDevice stub] andReturn:@[]] formats];
+
+ // when
+ bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
+ cricket::VideoFormat());
+
+ // then
+ EXPECT_FALSE(resultFormat);
+}
+
+TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) {
+ // given
+ id mockDevice = OCMClassMock([AVCaptureDevice class]);
+ OCMStub([mockDevice formats]).andReturn(@[]);
+ OCMStub([mockDevice lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
+ NSException* testException =
+ [NSException exceptionWithName:@"Test exception"
+ reason:@"Raised from unit tests"
+ userInfo:nil];
+ OCMStub([mockDevice setActiveFormat:[OCMArg any]]).andThrow(testException);
+ OCMExpect([mockDevice unlockForConfiguration]);
+
+ // when
+ bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
+ cricket::VideoFormat());
+
+ // then
+ EXPECT_FALSE(resultFormat);
+
+ // TODO(denicija): Remove try-catch when Chromium rolls this change:
+ // https://github.com/erikdoe/ocmock/commit/de1419415581dc307045e54bfe9c98c86efea96b
+ // Without it, stubbed exceptions are being re-raised on [mock verify].
+ // More information here:
+  // https://github.com/erikdoe/ocmock/issues/241
+ @try {
+ [mockDevice verify];
+ } @catch (NSException* exception) {
+ if ([exception.reason isEqual:testException.reason]) {
+ // Nothing dangerous here
+ EXPECT_TRUE([exception.reason isEqualToString:exception.reason]);
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4 b/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4
new file mode 100644
index 0000000000..ccffbf4722
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/foreman.mp4
Binary files differ
diff --git a/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h
new file mode 100644
index 0000000000..76c0d15c7e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+
+#include "api/video/i420_buffer.h"
+
+void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer);
+
+rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width,
+ int height);
+
+void CopyI420BufferToCVPixelBuffer(
+ rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
+ CVPixelBufferRef pixelBuffer);
diff --git a/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm
new file mode 100644
index 0000000000..98b86c54c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/frame_buffer_helpers.mm
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/objc/unittests/frame_buffer_helpers.h"
+
+#include "third_party/libyuv/include/libyuv.h"
+
+void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
+ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ void* baseAddr = CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t width = CVPixelBufferGetWidth(pixelBuffer);
+ size_t height = CVPixelBufferGetHeight(pixelBuffer);
+ CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+ int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32ARGB ?
+ kCGBitmapByteOrder32Little :
+ 0;
+ CGContextRef cgContext = CGBitmapContextCreate(baseAddr,
+ width,
+ height,
+ 8,
+ CVPixelBufferGetBytesPerRow(pixelBuffer),
+ colorSpace,
+ byteOrder | kCGImageAlphaNoneSkipLast);
+
+ // Create a gradient
+ CGFloat colors[] = {
+ 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0,
+ };
+ CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);
+
+ CGContextDrawLinearGradient(
+ cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0);
+ CGGradientRelease(gradient);
+
+ CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
+ CGContextRelease(cgContext);
+ CGImageRelease(cgImage);
+ CGColorSpaceRelease(colorSpace);
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+}
+
+rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width, int height) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(webrtc::I420Buffer::Create(width, height));
+ // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
+ for (int x = 0; x < width; x++) {
+ for (int y = 0; y < height; y++) {
+ buffer->MutableDataY()[x + y * width] = 128 * (x * height + y * width) / (width * height);
+ }
+ }
+ int chroma_width = buffer->ChromaWidth();
+ int chroma_height = buffer->ChromaHeight();
+ for (int x = 0; x < chroma_width; x++) {
+ for (int y = 0; y < chroma_height; y++) {
+ buffer->MutableDataU()[x + y * chroma_width] = 255 * x / (chroma_width - 1);
+ buffer->MutableDataV()[x + y * chroma_width] = 255 * y / (chroma_height - 1);
+ }
+ }
+ return buffer;
+}
+
+void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
+ CVPixelBufferRef pixelBuffer) {
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
+ pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
+ // NV12
+ uint8_t* dstY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
+ const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+ uint8_t* dstUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
+ const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+
+ libyuv::I420ToNV12(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dstY,
+ dstYStride,
+ dstUV,
+ dstUVStride,
+ i420Buffer->width(),
+ i420Buffer->height());
+ } else {
+ uint8_t* dst = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
+ const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+ if (pixelFormat == kCVPixelFormatType_32BGRA) {
+ // Corresponds to libyuv::FOURCC_ARGB
+ libyuv::I420ToARGB(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dst,
+ bytesPerRow,
+ i420Buffer->width(),
+ i420Buffer->height());
+ } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
+ // Corresponds to libyuv::FOURCC_BGRA
+ libyuv::I420ToBGRA(i420Buffer->DataY(),
+ i420Buffer->StrideY(),
+ i420Buffer->DataU(),
+ i420Buffer->StrideU(),
+ i420Buffer->DataV(),
+ i420Buffer->StrideV(),
+ dst,
+ bytesPerRow,
+ i420Buffer->width(),
+ i420Buffer->height());
+ }
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/main.mm b/third_party/libwebrtc/sdk/objc/unittests/main.mm
new file mode 100644
index 0000000000..9c513762c1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/main.mm
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+#include "rtc_base/thread.h"
+#include "test/ios/coverage_util_ios.h"
+
+int main(int argc, char* argv[]) {
+ rtc::test::ConfigureCoverageReportPath();
+
+ rtc::AutoThread main_thread;
+
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil, nil);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm b/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm
new file mode 100644
index 0000000000..82da549bb6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/nalu_rewriter_xctest.mm
@@ -0,0 +1,374 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/h264/h264_common.h"
+#include "components/video_codec/nalu_rewriter.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/gunit.h"
+
+#import <XCTest/XCTest.h>
+
+#if TARGET_OS_IPHONE
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+#endif
+
+@interface NaluRewriterTests : XCTestCase
+
+@end
+
+static const uint8_t NALU_TEST_DATA_0[] = {0xAA, 0xBB, 0xCC};
+static const uint8_t NALU_TEST_DATA_1[] = {0xDE, 0xAD, 0xBE, 0xEF};
+
+// clang-format off
+static const uint8_t SPS_PPS_BUFFER[] = {
+ // SPS nalu.
+ 0x00, 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28,
+ 0xD3, 0x70, 0x20, 0x20, 0x20, 0x20,
+ // PPS nalu.
+ 0x00, 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
+// clang-format on
+
+@implementation NaluRewriterTests
+
+- (void)testCreateVideoFormatDescription {
+ CMVideoFormatDescriptionRef description =
+ webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
+ XCTAssertTrue(description);
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ // clang-format off
+ const uint8_t sps_pps_not_at_start_buffer[] = {
+ // Add some non-SPS/PPS NALUs at the beginning
+ 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x01, 0xFF, 0x00, 0x00, 0x00, 0x01,
+ 0xAB, 0x33, 0x21,
+ // SPS nalu.
+ 0x00, 0x00, 0x01, 0x27, 0x42, 0x00, 0x1E, 0xAB, 0x40, 0xF0, 0x28, 0xD3,
+ 0x70, 0x20, 0x20, 0x20, 0x20,
+ // PPS nalu.
+ 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
+ // clang-format on
+ description = webrtc::CreateVideoFormatDescription(sps_pps_not_at_start_buffer,
+ arraysize(sps_pps_not_at_start_buffer));
+
+ XCTAssertTrue(description);
+
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ const uint8_t other_buffer[] = {0x00, 0x00, 0x00, 0x01, 0x28};
+ XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer, arraysize(other_buffer)));
+}
+
+- (void)testReadEmptyInput {
+ const uint8_t annex_b_test_data[] = {0x00};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, 0);
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadSingleNalu {
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xAA};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 4, nalu);
+ XCTAssertEqual(1u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadSingleNalu3ByteHeader {
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x01, 0xAA};
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 3, nalu);
+ XCTAssertEqual(1u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadMissingNalu {
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {0x01,
+ 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0xFF};
+ // clang-format on
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testReadMultipleNalus {
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xFF,
+ 0x01,
+ 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0xFF,
+ 0x00, 0x00, 0x01, 0xAA, 0xBB};
+ // clang-format on
+ webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+ const uint8_t* nalu = nullptr;
+ size_t nalu_length = 0;
+ XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 4, nalu);
+ XCTAssertEqual(8u, nalu_length);
+ XCTAssertEqual(5u, reader.BytesRemaining());
+ XCTAssertTrue(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(annex_b_test_data + 15, nalu);
+ XCTAssertEqual(2u, nalu_length);
+ XCTAssertEqual(0u, reader.BytesRemaining());
+ XCTAssertFalse(reader.ReadNalu(&nalu, &nalu_length));
+ XCTAssertEqual(nullptr, nalu);
+ XCTAssertEqual(0u, nalu_length);
+}
+
+- (void)testEmptyOutputBuffer {
+ const uint8_t expected_buffer[] = {0x00};
+ const size_t buffer_size = 1;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ memset(buffer.get(), 0, buffer_size);
+ webrtc::AvccBufferWriter writer(buffer.get(), 0);
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testWriteSingleNalu {
+ const uint8_t expected_buffer[] = {
+ 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
+ };
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + 4;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testWriteMultipleNalus {
+ // clang-format off
+ const uint8_t expected_buffer[] = {
+ 0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
+ 0x00, 0x00, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF
+ };
+ // clang-format on
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4), writer.BytesRemaining());
+ XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+ XCTAssertEqual(0u, writer.BytesRemaining());
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testOverflow {
+ const uint8_t expected_buffer[] = {0x00, 0x00, 0x00};
+ const size_t buffer_size = arraysize(NALU_TEST_DATA_0);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ memset(buffer.get(), 0, buffer_size);
+ webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+ XCTAssertEqual(buffer_size, writer.BytesRemaining());
+ XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+}
+
+- (void)testH264AnnexBBufferToCMSampleBuffer {
+ // clang-format off
+ const uint8_t annex_b_test_data[] = {
+ 0x00,
+ 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x01,
+ 0xAA, 0xFF, // second chunk, 2 bytes
+ 0x00, 0x00, 0x01,
+ 0xBB}; // third chunk, 1 byte, will not fit into output array
+
+ const uint8_t expected_cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ CMMemoryPoolRef memory_pool = CMMemoryPoolCreate(nil);
+ CMSampleBufferRef out_sample_buffer = nil;
+ CMVideoFormatDescriptionRef description = [self createDescription];
+
+ Boolean result = webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
+ arraysize(annex_b_test_data),
+ description,
+ &out_sample_buffer,
+ memory_pool);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(description, CMSampleBufferGetFormatDescription(out_sample_buffer));
+
+ char* data_ptr = nullptr;
+ CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(out_sample_buffer);
+ size_t block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
+ CMBlockBufferGetDataPointer(block_buffer, 0, nullptr, nullptr, &data_ptr);
+ XCTAssertEqual(block_buffer_size, arraysize(annex_b_test_data));
+
+ int data_comparison_result =
+ memcmp(expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
+
+ XCTAssertEqual(0, data_comparison_result);
+
+ if (description) {
+ CFRelease(description);
+ description = nullptr;
+ }
+
+ CMMemoryPoolInvalidate(memory_pool);
+ CFRelease(memory_pool);
+}
+
+- (void)testH264CMSampleBufferToAnnexBBuffer {
+ // clang-format off
+ const uint8_t cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+
+ const uint8_t expected_annex_b_data[] = {
+ 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x01,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ rtc::Buffer annexb_buffer(arraysize(cmsample_data));
+ CMSampleBufferRef sample_buffer =
+ [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+
+ Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+ /* is_keyframe */ false,
+ &annexb_buffer);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(arraysize(expected_annex_b_data), annexb_buffer.size());
+
+ int data_comparison_result =
+ memcmp(expected_annex_b_data, annexb_buffer.data(), arraysize(expected_annex_b_data));
+
+ XCTAssertEqual(0, data_comparison_result);
+}
+
+- (void)testH264CMSampleBufferToAnnexBBufferWithKeyframe {
+ // clang-format off
+ const uint8_t cmsample_data[] = {
+ 0x00, 0x00, 0x00, 0x04,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x02,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+
+ const uint8_t expected_annex_b_data[] = {
+ 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x00, 0x00, 0xFF, // first chunk, 4 bytes
+ 0x00, 0x00, 0x00, 0x01,
+ 0xAA, 0xFF}; // second chunk, 2 bytes
+ // clang-format on
+
+ rtc::Buffer annexb_buffer(arraysize(cmsample_data));
+ CMSampleBufferRef sample_buffer =
+ [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+
+ Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+ /* is_keyframe */ true,
+ &annexb_buffer);
+
+ XCTAssertTrue(result);
+
+ XCTAssertEqual(arraysize(SPS_PPS_BUFFER) + arraysize(expected_annex_b_data),
+ annexb_buffer.size());
+
+ XCTAssertEqual(0, memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
+
+ XCTAssertEqual(0,
+ memcmp(expected_annex_b_data,
+ annexb_buffer.data() + arraysize(SPS_PPS_BUFFER),
+ arraysize(expected_annex_b_data)));
+}
+
+- (CMVideoFormatDescriptionRef)createDescription {
+ CMVideoFormatDescriptionRef description =
+ webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
+ XCTAssertTrue(description);
+ return description;
+}
+
+- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData cmsampleSize:(size_t)cmsampleSize {
+ CMSampleBufferRef sample_buffer = nil;
+ OSStatus status;
+
+ CMVideoFormatDescriptionRef description = [self createDescription];
+ CMBlockBufferRef block_buffer = nullptr;
+
+ status = CMBlockBufferCreateWithMemoryBlock(nullptr,
+ cmsampleData,
+ cmsampleSize,
+ nullptr,
+ nullptr,
+ 0,
+ cmsampleSize,
+ kCMBlockBufferAssureMemoryNowFlag,
+ &block_buffer);
+ XCTAssertEqual(kCMBlockBufferNoErr, status);
+
+ status = CMSampleBufferCreate(nullptr,
+ block_buffer,
+ true,
+ nullptr,
+ nullptr,
+ description,
+ 1,
+ 0,
+ nullptr,
+ 0,
+ nullptr,
+ &sample_buffer);
+ XCTAssertEqual(noErr, status);
+
+ return sample_buffer;
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
new file mode 100644
index 0000000000..f44d831d29
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_decoder_factory.h"
+
+#import "base/RTCMacros.h"
+#import "base/RTCVideoDecoder.h"
+#import "base/RTCVideoDecoderFactory.h"
+#include "media/base/codec.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/gunit.h"
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
+ id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
+ OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
+ OCMStub([decoderMock decode:[OCMArg any]
+ missingFrames:NO
+ codecSpecificInfo:[OCMArg any]
+ renderTimeMs:0])
+ .andReturn(return_code);
+ OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
+
+ id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
+ RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+ OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
+ OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
+ return decoderFactoryMock;
+}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateOKDecoderFactory() {
+ return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
+}
+
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateErrorDecoderFactory() {
+ return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
+ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
+ webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
+ return decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName));
+}
+
+#pragma mark -
+
+@interface ObjCVideoDecoderFactoryTests : XCTestCase
+@end
+
+@implementation ObjCVideoDecoderFactoryTests
+
+- (void)testConfigureReturnsTrueOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ webrtc::VideoDecoder::Settings settings;
+ EXPECT_TRUE(decoder->Configure(settings));
+}
+
+- (void)testConfigureReturnsFalseOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ webrtc::VideoDecoder::Settings settings;
+ EXPECT_FALSE(decoder->Configure(settings));
+}
+
+- (void)testDecodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
+
+ EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testDecodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ webrtc::EncodedImage encoded_image;
+ encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
+
+ EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+- (void)testReleaseDecodeReturnsOKOnSuccess {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+
+ EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
+}
+
+- (void)testReleaseDecodeReturnsErrorOnFail {
+ std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+
+ EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
+}
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
new file mode 100644
index 0000000000..9a4fee2e95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <OCMock/OCMock.h>
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/native/src/objc_video_encoder_factory.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#import "base/RTCVideoEncoder.h"
+#import "base/RTCVideoEncoderFactory.h"
+#import "base/RTCVideoFrameBuffer.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/gunit.h"
+#include "sdk/objc/native/src/objc_frame_buffer.h"
+
+// Builds an OCMock-backed RTCVideoEncoderFactory that advertises a single
+// H264 codec and hands out an encoder mock whose stubbed entry points
+// (start/encode/release/setBitrate) all return `return_code`. Used by the
+// tests below to drive both the success and the failure path through the
+// C++ ObjCVideoEncoderFactory adapter.
+// NOTE(review): OCMStub matches primitive arguments by exact value, so the
+// numberOfCores:1 and setBitrate:0 framerate:0 stubs only fire for those
+// values — confirm callers use them.
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
+ id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
+ OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
+ .andReturn(return_code);
+ OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
+ .andReturn(return_code);
+ OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
+ OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
+
+ id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
+ RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+ OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
+ OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
+ OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
+ return encoderFactoryMock;
+}
+
+// Factory whose encoder reports WEBRTC_VIDEO_CODEC_OK from every stub.
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateOKEncoderFactory() {
+ return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
+}
+
+// Factory whose encoder reports WEBRTC_VIDEO_CODEC_ERROR from every stub.
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateErrorEncoderFactory() {
+ return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+// Wraps `factory` in the C++ webrtc::ObjCVideoEncoderFactory adapter and
+// creates an encoder for "H264" — the only codec the mock factory advertises.
+std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
+ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> factory) {
+ webrtc::ObjCVideoEncoderFactory encoder_factory(factory);
+ webrtc::SdpVideoFormat format("H264");
+ return encoder_factory.CreateVideoEncoder(format);
+}
+
+#pragma mark -
+
+@interface ObjCVideoEncoderFactoryTests : XCTestCase
+@end
+
+// Builds a VideoFrame backed by a freshly created 640x480 32ARGB
+// CVPixelBuffer. The raw buffer is returned through `pixel_buffer`;
+// CVPixelBufferCreate hands back a +1 reference, so the caller must
+// CVPixelBufferRelease it once the encode call has completed (the frame
+// itself keeps the buffer alive through its RTCCVPixelBuffer wrapper).
+static webrtc::VideoFrame CreateTestFrame(CVPixelBufferRef *pixel_buffer) {
+  *pixel_buffer = nullptr;
+  CVPixelBufferCreate(
+      kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, pixel_buffer);
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
+          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:*pixel_buffer]);
+  return webrtc::VideoFrame::Builder()
+      .set_video_frame_buffer(buffer)
+      .set_rotation(webrtc::kVideoRotation_0)
+      .set_timestamp_us(0)
+      .build();
+}
+
+// Exercises webrtc::ObjCVideoEncoderFactory: the return code stubbed on the
+// underlying RTCVideoEncoder mock must surface unchanged through the C++
+// webrtc::VideoEncoder adapter.
+@implementation ObjCVideoEncoderFactoryTests
+
+// InitEncode() forwards the success code from the mock.
+- (void)testInitEncodeReturnsOKOnSuccess {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
+  // Stack-allocated: the original `new webrtc::VideoCodec()` was leaked.
+  webrtc::VideoCodec settings;
+  const webrtc::VideoEncoder::Capabilities kCapabilities(false);
+  EXPECT_EQ(encoder->InitEncode(&settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+            WEBRTC_VIDEO_CODEC_OK);
+}
+
+// InitEncode() forwards the error code from the mock.
+- (void)testInitEncodeReturnsErrorOnFail {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+  webrtc::VideoCodec settings;
+  const webrtc::VideoEncoder::Capabilities kCapabilities(false);
+  EXPECT_EQ(encoder->InitEncode(&settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+            WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+// Encode() forwards the success code from the mock; only the return code is
+// checked, so any valid frame will do.
+- (void)testEncodeReturnsOKOnSuccess {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
+  CVPixelBufferRef pixel_buffer = nullptr;
+  webrtc::VideoFrame frame = CreateTestFrame(&pixel_buffer);
+  std::vector<webrtc::VideoFrameType> frame_types;
+
+  EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
+  // Balance the +1 from CVPixelBufferCreate (leaked in the original test).
+  CVPixelBufferRelease(pixel_buffer);
+}
+
+// Encode() forwards the error code from the mock.
+- (void)testEncodeReturnsErrorOnFail {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+  CVPixelBufferRef pixel_buffer = nullptr;
+  webrtc::VideoFrame frame = CreateTestFrame(&pixel_buffer);
+  std::vector<webrtc::VideoFrameType> frame_types;
+
+  EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_ERROR);
+  CVPixelBufferRelease(pixel_buffer);
+}
+
+// Release() forwards the success code from the mock.
+- (void)testReleaseEncodeReturnsOKOnSuccess {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+
+  EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
+}
+
+// Release() forwards the error code from the mock.
+- (void)testReleaseEncodeReturnsErrorOnFail {
+  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+
+  EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
+}
+
+// The factory advertises exactly the one codec the mock declares.
+- (void)testGetSupportedFormats {
+  webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
+  std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetSupportedFormats();
+  EXPECT_EQ(supportedFormats.size(), 1u);
+  EXPECT_EQ(supportedFormats[0].name, "H264");
+}
+
+// GetImplementations() mirrors the mock's `implementations` stub.
+- (void)testGetImplementations {
+  webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
+  std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetImplementations();
+  EXPECT_EQ(supportedFormats.size(), 1u);
+  EXPECT_EQ(supportedFormats[0].name, "H264");
+}
+
+@end
diff --git a/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm b/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm
new file mode 100644
index 0000000000..be26720b95
--- /dev/null
+++ b/third_party/libwebrtc/sdk/objc/unittests/scoped_cftyperef_tests.mm
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <XCTest/XCTest.h>
+
+#include "sdk/objc/helpers/scoped_cftyperef.h"
+
+#include "test/gtest.h"
+
+namespace {
+// Minimal stand-in for a CF-style type: `retain_count` makes the
+// Retain/Release calls performed by ScopedTypeRef directly observable.
+struct TestType {
+ TestType() : has_value(true) {}
+ TestType(bool b) : has_value(b) {}
+ explicit operator bool() { return has_value; }
+ // NOTE(review): the bool constructor, operator bool and has_value are not
+ // used by TestTypeTraits below (which works on TestType* and a pointer
+ // cast) — presumably leftovers from a by-value version; confirm.
+ bool has_value;
+ int retain_count = 0;
+};
+
+typedef TestType* TestTypeRef;
+
+// Traits consumed by rtc::internal::ScopedTypeRef: invalid value plus
+// retain/release that bump the pointee's counter instead of managing memory.
+struct TestTypeTraits {
+ // TestTypeRef(false) casts `false` to TestType*, i.e. the null pointer.
+ static TestTypeRef InvalidValue() { return TestTypeRef(false); }
+ static void Release(TestTypeRef t) { t->retain_count--; }
+ static TestTypeRef Retain(TestTypeRef t) {
+ t->retain_count++;
+ return t;
+ }
+};
+} // namespace
+
+using ScopedTestType = rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
+
+// In these tests we sometime introduce variables just to
+// observe side-effects. Ignore the compilers complaints.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-variable"
+
+@interface ScopedTypeRefTests : XCTestCase
+@end
+
+// Verifies the retain/release bookkeeping of rtc::internal::ScopedTypeRef
+// using the instrumented TestType above: every Retain the wrapper performs
+// increments `retain_count`, every Release decrements it.
+@implementation ScopedTypeRefTests
+
+// Adopting a reference (default policy) must not add a retain.
+- (void)testShouldNotRetainByDefault {
+ TestType a;
+ ScopedTestType ref(&a);
+ EXPECT_EQ(0, a.retain_count);
+}
+
+// RetainPolicy::RETAIN must add exactly one retain on construction.
+- (void)testShouldRetainWithPolicy {
+ TestType a;
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+}
+
+// The retain taken at construction is balanced by a release at scope exit.
+- (void)testShouldReleaseWhenLeavingScope {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ }
+ EXPECT_EQ(0, a.retain_count);
+}
+
+// Copying a scoped ref adds a retain; both copies release on destruction.
+- (void)testShouldBeCopyable {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref1(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ ScopedTestType ref2 = ref1;
+ EXPECT_EQ(2, a.retain_count);
+ }
+ EXPECT_EQ(0, a.retain_count);
+}
+
+// release() hands ownership to the caller: the wrapper must NOT release at
+// scope exit, so the count stays at 1 afterwards.
+- (void)testCanReleaseOwnership {
+ TestType a;
+ EXPECT_EQ(0, a.retain_count);
+ {
+ ScopedTestType ref(&a, rtc::RetainPolicy::RETAIN);
+ EXPECT_EQ(1, a.retain_count);
+ TestTypeRef b = ref.release();
+ }
+ EXPECT_EQ(1, a.retain_count);
+}
+
+// Truthiness tracks whether the wrapper currently holds a valid reference.
+- (void)testShouldBeTestableForTruthiness {
+ ScopedTestType ref;
+ EXPECT_FALSE(ref);
+ TestType a;
+ ref = &a;
+ EXPECT_TRUE(ref);
+ ref.release();
+ EXPECT_FALSE(ref);
+}
+
+// operator-> must forward to the wrapped pointer (same object, same member).
+- (void)testShouldProvideAccessToWrappedType {
+ TestType a;
+ ScopedTestType ref(&a);
+ EXPECT_EQ(&(a.retain_count), &(ref->retain_count));
+}
+
+@end
+
+#pragma clang diagnostic pop