summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/webrtc/media
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/libwebrtc/webrtc/media')
-rw-r--r--third_party/libwebrtc/webrtc/media/BUILD.gn586
-rw-r--r--third_party/libwebrtc/webrtc/media/DEPS25
-rw-r--r--third_party/libwebrtc/webrtc/media/OWNERS11
-rw-r--r--third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc118
-rw-r--r--third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h84
-rw-r--r--third_party/libwebrtc/webrtc/media/base/audiosource.h49
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec.cc367
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec.h264
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec_unittest.cc326
-rw-r--r--third_party/libwebrtc/webrtc/media/base/cryptoparams.h17
-rw-r--r--third_party/libwebrtc/webrtc/media/base/device.h36
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakemediaengine.h978
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h228
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakertp.cc66
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakertp.h140
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h152
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h142
-rw-r--r--third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc310
-rw-r--r--third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h107
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediachannel.h1254
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaconstants.cc113
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaconstants.h137
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaengine.cc40
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaengine.h188
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc355
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine.h111
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc377
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils.cc473
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils.h90
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc353
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams.cc268
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams.h332
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc310
-rw-r--r--third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h35
-rw-r--r--third_party/libwebrtc/webrtc/media/base/testutils.cc170
-rw-r--r--third_party/libwebrtc/webrtc/media/base/testutils.h151
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils.cc127
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils.h30
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc120
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter.cc293
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter.h104
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc1096
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc131
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster.h70
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc195
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer.cc381
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer.h289
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc786
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h32
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon.cc79
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon.h229
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc94
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h951
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosinkinterface.h34
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourcebase.cc58
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourcebase.h48
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc17
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourceinterface.h59
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/adm_helpers.cc81
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/adm_helpers.h27
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/apm_helpers.cc190
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/apm_helpers.h52
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/apm_helpers_unittest.cc287
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/constants.cc18
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/constants.h25
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.cc226
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.h38
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.cc66
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.h62
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy_unittest.cc242
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.cc648
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.h333
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtcdeviceinfo.h109
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtcvcmfactory.h49
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideocapturemodule.h87
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideoengine.h260
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/fakewebrtcvoiceengine.h124
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.cc50
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.h30
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory_unittest.cc27
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.cc59
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.h33
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine.h49
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine_unittest.cc49
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.cc150
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.h56
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/payload_type_mapper_unittest.cc138
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.cc100
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.h34
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.cc126
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.h33
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast.cc338
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast.h63
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.cc562
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.h120
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc831
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/simulcast_unittest.cc56
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.cc141
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.h64
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper_unittest.cc209
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.cc308
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.h102
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper_unittest.cc544
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.cc84
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.h57
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy_unittest.cc139
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.cc227
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.h93
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine_unittest.cc246
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.cc342
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.h85
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer_unittest.cc134
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.cc33
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.h29
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.cc36
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.h56
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.cc20
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.h50
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory_unittest.cc45
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.cc2616
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.h533
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine_unittest.cc4866
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvoe.h88
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.cc2365
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.h304
-rw-r--r--third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine_unittest.cc3575
-rw-r--r--third_party/libwebrtc/webrtc/media/rtc_audio_video_gn/moz.build257
-rw-r--r--third_party/libwebrtc/webrtc/media/rtc_h264_profile_id_gn/moz.build217
-rw-r--r--third_party/libwebrtc/webrtc/media/rtc_media_base_gn/moz.build236
-rw-r--r--third_party/libwebrtc/webrtc/media/rtc_media_gn/moz.build194
130 files changed, 38109 insertions, 0 deletions
diff --git a/third_party/libwebrtc/webrtc/media/BUILD.gn b/third_party/libwebrtc/webrtc/media/BUILD.gn
new file mode 100644
index 0000000000..385163deaf
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/BUILD.gn
@@ -0,0 +1,586 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/linux/pkg_config.gni")
+import("../webrtc.gni")
+
+group("media") {
+ public_deps = [
+ ":rtc_media",
+ ":rtc_media_base",
+ ]
+}
+
+config("rtc_media_defines_config") {
+ defines = [
+ "HAVE_WEBRTC_VIDEO",
+ "HAVE_WEBRTC_VOICE",
+ ]
+}
+
+config("rtc_media_warnings_config") {
+ # GN orders flags on a target before flags from configs. The default config
+ # adds these flags so to cancel them out they need to come from a config and
+ # cannot be on the target directly.
+ if (!is_win) {
+ cflags = [ "-Wno-deprecated-declarations" ]
+ }
+}
+
+rtc_source_set("rtc_h264_profile_id") {
+ sources = [
+ "base/h264_profile_level_id.cc",
+ "base/h264_profile_level_id.h",
+ ]
+
+ if (!build_with_chromium && is_clang) {
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+ }
+
+ deps = [
+ "..:webrtc_common",
+ "../api:optional",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ ]
+}
+
+rtc_static_library("rtc_media_base") {
+ defines = []
+ libs = []
+ deps = []
+ public_deps = []
+ sources = [
+ "base/adaptedvideotracksource.cc",
+ "base/adaptedvideotracksource.h",
+ "base/audiosource.h",
+ "base/codec.cc",
+ "base/codec.h",
+ "base/cryptoparams.h",
+ "base/device.h",
+ "base/mediachannel.h",
+ "base/mediaconstants.cc",
+ "base/mediaconstants.h",
+ "base/mediaengine.cc",
+ "base/mediaengine.h",
+ "base/streamparams.cc",
+ "base/streamparams.h",
+ "base/videoadapter.cc",
+ "base/videoadapter.h",
+ "base/videobroadcaster.cc",
+ "base/videobroadcaster.h",
+ "base/videocapturer.cc",
+ "base/videocapturer.h",
+ "base/videocapturerfactory.h",
+ "base/videocommon.cc",
+ "base/videocommon.h",
+ "base/videosourcebase.cc",
+ "base/videosourcebase.h",
+
+ # TODO(aleloi): add "base/videosinkinterface.h"
+ "base/videosourceinterface.cc",
+
+ # TODO(aleloi): add "base/videosourceinterface.h"
+ ]
+
+ if (!build_with_mozilla) {
+ srcs += [
+ "base/rtpdataengine.cc",
+ "base/rtpdataengine.h",
+ "base/rtputils.cc",
+ "base/rtputils.h",
+ "base/turnutils.cc",
+ "base/turnutils.h",
+ ]
+ }
+
+ if (!build_with_chromium && is_clang) {
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+ }
+
+ include_dirs = []
+ if (rtc_build_libyuv) {
+ deps += [ "$rtc_libyuv_dir" ]
+ public_deps += [ "$rtc_libyuv_dir" ]
+ } else {
+ # Need to add a directory normally exported by libyuv.
+ include_dirs += [ "$rtc_libyuv_dir/include" ]
+ }
+
+ deps += [
+ "..:webrtc_common",
+ "../api:optional",
+ "../api:video_frame_api",
+ "../api:video_frame_api_i420",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/video_codecs:video_codecs_api",
+ "../call:call_interfaces",
+ "../call:video_stream_api",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ "../system_wrappers:field_trial_api",
+ ]
+
+ public_deps += [ ":rtc_h264_profile_id" ]
+
+ if (!build_with_mozilla) {
+ deps += [
+ "../api:libjingle_peerconnection_api",
+ "../p2p"
+ ]
+ }
+
+ if (is_nacl) {
+ deps += [ "//native_client_sdk/src/libraries/nacl_io" ]
+ }
+}
+
+rtc_static_library("rtc_audio_video") {
+ defines = []
+ libs = []
+ deps = [
+ "../api:video_frame_api_i420",
+ "../modules/video_coding:video_coding_utility",
+ ]
+ sources = [
+ "engine/adm_helpers.cc",
+ "engine/adm_helpers.h",
+ "engine/apm_helpers.cc",
+ "engine/apm_helpers.h",
+ "engine/constants.cc",
+ "engine/constants.h",
+ "engine/convert_legacy_video_factory.cc",
+ "engine/convert_legacy_video_factory.h",
+ "engine/encoder_simulcast_proxy.cc",
+ "engine/encoder_simulcast_proxy.h",
+ "engine/internaldecoderfactory.cc",
+ "engine/internaldecoderfactory.h",
+ "engine/internalencoderfactory.cc",
+ "engine/internalencoderfactory.h",
+ "engine/nullwebrtcvideoengine.h",
+ "engine/payload_type_mapper.cc",
+ "engine/payload_type_mapper.h",
+ "engine/scopedvideodecoder.cc",
+ "engine/scopedvideodecoder.h",
+ "engine/scopedvideoencoder.cc",
+ "engine/scopedvideoencoder.h",
+ "engine/simulcast.cc",
+ "engine/simulcast.h",
+ "engine/simulcast_encoder_adapter.cc",
+ "engine/simulcast_encoder_adapter.h",
+ "engine/videodecodersoftwarefallbackwrapper.cc",
+ "engine/videodecodersoftwarefallbackwrapper.h",
+ "engine/videoencodersoftwarefallbackwrapper.cc",
+ "engine/videoencodersoftwarefallbackwrapper.h",
+ "engine/webrtcvideodecoderfactory.cc",
+ "engine/webrtcvideodecoderfactory.h",
+ "engine/webrtcvideoencoderfactory.cc",
+ "engine/webrtcvideoencoderfactory.h",
+ ]
+
+ if (!build_with_mozilla) {
+ sources += [
+ "engine/webrtcvideocapturer.cc",
+ "engine/webrtcvideocapturer.h",
+ "engine/webrtcvideocapturerfactory.cc",
+ "engine/webrtcvideocapturerfactory.h",
+ "engine/webrtcmediaengine.cc",
+ "engine/webrtcmediaengine.h",
+ "engine/webrtcvideoengine.cc",
+ "engine/webrtcvideoengine.h",
+ "engine/webrtcvoe.h",
+ "engine/webrtcvoiceengine.cc",
+ "engine/webrtcvoiceengine.h",
+ ]
+ }
+
+ configs += [ ":rtc_media_warnings_config" ]
+
+ if (!build_with_chromium && is_clang) {
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+ }
+
+ if (is_win) {
+ cflags = [
+ "/wd4245", # conversion from "int" to "size_t", signed/unsigned mismatch.
+ "/wd4267", # conversion from "size_t" to "int", possible loss of data.
+ "/wd4389", # signed/unsigned mismatch.
+ ]
+ }
+
+ if (rtc_enable_intelligibility_enhancer) {
+ defines += [ "WEBRTC_INTELLIGIBILITY_ENHANCER=1" ]
+ } else {
+ defines += [ "WEBRTC_INTELLIGIBILITY_ENHANCER=0" ]
+ }
+
+ if (rtc_opus_support_120ms_ptime) {
+ defines += [ "WEBRTC_OPUS_SUPPORT_120MS_PTIME=1" ]
+ } else {
+ defines += [ "WEBRTC_OPUS_SUPPORT_120MS_PTIME=0" ]
+ }
+
+ include_dirs = []
+ if (rtc_build_libyuv) {
+ deps += [ "$rtc_libyuv_dir" ]
+ public_deps = [
+ "$rtc_libyuv_dir",
+ ]
+ } else {
+ # Need to add a directory normally exported by libyuv.
+ include_dirs += [ "$rtc_libyuv_dir/include" ]
+ }
+
+ public_configs = []
+ if (build_with_chromium) {
+ deps += [ "../modules/video_capture:video_capture" ]
+ } else {
+ public_configs += [ ":rtc_media_defines_config" ]
+ deps += [ "../modules/video_capture:video_capture_internal_impl" ]
+ }
+ if (rtc_enable_protobuf) {
+ deps += [ "../modules/audio_processing/aec_dump:aec_dump_impl" ]
+ } else {
+ deps += [ "../modules/audio_processing/aec_dump:null_aec_dump_factory" ]
+ }
+ deps += [
+ ":rtc_media_base",
+ "..:webrtc_common",
+ "../api:call_api",
+ "../api:optional",
+ "../api:transport_api",
+ "../api:video_frame_api",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/video_codecs:video_codecs_api",
+ "../call",
+ "../call:video_stream_api",
+ "../common_video:common_video",
+ "../modules/audio_coding:rent_a_codec",
+ "../modules/audio_device:audio_device",
+ "../modules/audio_mixer:audio_mixer_impl",
+ "../modules/audio_processing:audio_processing",
+ "../modules/audio_processing/aec_dump",
+ "../modules/video_capture:video_capture_module",
+ "../modules/video_coding",
+ "../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_vp8",
+ "../modules/video_coding:webrtc_vp9",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base:sequenced_task_checker",
+ "../system_wrappers",
+ "../video",
+ "../voice_engine",
+ ]
+
+ if (!build_with_mozilla) {
+ deps += [
+ "../api:libjingle_peerconnection_api",
+ "../p2p:rtc_p2p",
+ "../pc:rtc_pc_base",
+ ]
+ }
+}
+
+rtc_static_library("rtc_data") {
+ defines = []
+ deps = []
+
+ if (rtc_enable_sctp) {
+ sources = [
+ "sctp/sctptransport.cc",
+ "sctp/sctptransport.h",
+ "sctp/sctptransportinternal.h",
+ ]
+ }
+
+ configs += [ ":rtc_media_warnings_config" ]
+
+ if (!build_with_chromium && is_clang) {
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+ }
+
+ if (is_win) {
+ cflags = [
+ "/wd4245", # conversion from "int" to "size_t", signed/unsigned mismatch.
+ "/wd4267", # conversion from "size_t" to "int", possible loss of data.
+ "/wd4389", # signed/unsigned mismatch.
+ ]
+ }
+
+ if (rtc_enable_sctp && rtc_build_usrsctp) {
+ include_dirs = [
+ # TODO(jiayl): move this into the public_configs of
+ # //third_party/usrsctp/BUILD.gn.
+ "//third_party/usrsctp/usrsctplib",
+ ]
+ deps += [ "//third_party/usrsctp" ]
+ }
+
+ deps += [
+ ":rtc_media_base",
+ "..:webrtc_common",
+ "../api:call_api",
+ "../api:transport_api",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ "../system_wrappers",
+ ]
+
+ if (!build_with_mozilla) {
+ deps += [ "../p2p:rtc_p2p" ]
+ }
+}
+
+rtc_source_set("rtc_media") {
+ public_deps = [
+ ":rtc_audio_video",
+ ]
+
+ if (!build_with_mozilla) {
+ public_deps += [
+ ":rtc_data",
+ ]
+ }
+}
+
+if (rtc_include_tests) {
+ config("rtc_unittest_main_config") {
+ # GN orders flags on a target before flags from configs. The default config
+ # adds -Wall, and this flag have to be after -Wall -- so they need to
+ # come from a config and can"t be on the target directly.
+ if (is_clang && is_ios) {
+ cflags = [ "-Wno-unused-variable" ]
+ }
+ }
+
+ rtc_source_set("rtc_media_tests_utils") {
+ testonly = true
+
+ include_dirs = []
+ public_deps = []
+ deps = [
+ "../api:video_frame_api_i420",
+ "../call:video_stream_api",
+ "../modules/audio_coding:rent_a_codec",
+ "../modules/audio_processing:audio_processing",
+ "../modules/rtp_rtcp:rtp_rtcp",
+ "../modules/video_coding:video_coding_utility",
+ "../p2p:rtc_p2p",
+ ]
+ sources = [
+ "base/fakemediaengine.h",
+ "base/fakenetworkinterface.h",
+ "base/fakertp.cc",
+ "base/fakertp.h",
+ "base/fakevideocapturer.h",
+ "base/fakevideorenderer.h",
+ "base/test/mock_mediachannel.h",
+ "base/testutils.cc",
+ "base/testutils.h",
+ "engine/fakewebrtccall.cc",
+ "engine/fakewebrtccall.h",
+ "engine/fakewebrtcdeviceinfo.h",
+ "engine/fakewebrtcvcmfactory.h",
+ "engine/fakewebrtcvideocapturemodule.h",
+ "engine/fakewebrtcvideoengine.h",
+ "engine/fakewebrtcvoiceengine.h",
+ ]
+
+ configs += [ ":rtc_unittest_main_config" ]
+
+ if (rtc_build_libyuv) {
+ deps += [ "$rtc_libyuv_dir" ]
+ public_deps += [ "$rtc_libyuv_dir" ]
+ } else {
+ # Need to add a directory normally exported by libyuv.
+ include_dirs += [ "$rtc_libyuv_dir/include" ]
+ }
+
+ if (!build_with_chromium && is_clang) {
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+ }
+
+ deps += [
+ ":rtc_media",
+ ":rtc_media_base",
+ "..:webrtc_common",
+ "../api:call_api",
+ "../api:video_frame_api",
+ "../api/video_codecs:video_codecs_api",
+ "../call:call_interfaces",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ "../rtc_base:rtc_base_tests_utils",
+ "../test:test_support",
+ "//testing/gtest",
+ ]
+ public_deps += [ "//testing/gmock" ]
+ }
+
+ config("rtc_media_unittests_config") {
+ # GN orders flags on a target before flags from configs. The default config
+ # adds -Wall, and this flag have to be after -Wall -- so they need to
+ # come from a config and can"t be on the target directly.
+ # TODO(kjellander): Make the code compile without disabling these flags.
+ # See https://bugs.webrtc.org/3307.
+ if (is_clang && is_win) {
+ cflags = [
+ # See https://bugs.chromium.org/p/webrtc/issues/detail?id=6266
+ # for -Wno-sign-compare
+ "-Wno-sign-compare",
+ ]
+ }
+ if (!is_win) {
+ cflags = [ "-Wno-sign-compare" ]
+ }
+ }
+
+ rtc_media_unittests_resources = [
+ "../resources/media/captured-320x240-2s-48.frames",
+ "../resources/media/faces.1280x720_P420.yuv",
+ "../resources/media/faces_I420.jpg",
+ "../resources/media/faces_I422.jpg",
+ "../resources/media/faces_I444.jpg",
+ "../resources/media/faces_I411.jpg",
+ "../resources/media/faces_I400.jpg",
+ ]
+
+ if (is_ios) {
+ bundle_data("rtc_media_unittests_bundle_data") {
+ testonly = true
+ sources = rtc_media_unittests_resources
+ outputs = [
+ "{{bundle_resources_dir}}/{{source_file_part}}",
+ ]
+ }
+ }
+
+ rtc_test("rtc_media_unittests") {
+ testonly = true
+
+ defines = []
+ deps = [
+ "../api:video_frame_api_i420",
+ "../pc:rtc_pc",
+ "../test:field_trial",
+ ]
+ sources = [
+ "base/codec_unittest.cc",
+ "base/rtpdataengine_unittest.cc",
+ "base/rtputils_unittest.cc",
+ "base/streamparams_unittest.cc",
+ "base/turnutils_unittest.cc",
+ "base/videoadapter_unittest.cc",
+ "base/videobroadcaster_unittest.cc",
+ "base/videocapturer_unittest.cc",
+ "base/videocommon_unittest.cc",
+ "base/videoengine_unittest.h",
+ "engine/apm_helpers_unittest.cc",
+ "engine/encoder_simulcast_proxy_unittest.cc",
+ "engine/internaldecoderfactory_unittest.cc",
+ "engine/nullwebrtcvideoengine_unittest.cc",
+ "engine/payload_type_mapper_unittest.cc",
+ "engine/simulcast_encoder_adapter_unittest.cc",
+ "engine/simulcast_unittest.cc",
+ "engine/videodecodersoftwarefallbackwrapper_unittest.cc",
+ "engine/videoencodersoftwarefallbackwrapper_unittest.cc",
+ "engine/webrtcmediaengine_unittest.cc",
+ "engine/webrtcvideocapturer_unittest.cc",
+ "engine/webrtcvideoencoderfactory_unittest.cc",
+ "engine/webrtcvideoengine_unittest.cc",
+ ]
+
+ # TODO(kthelgason): Reenable this test on iOS.
+ # See bugs.webrtc.org/5569
+ if (!is_ios) {
+ sources += [ "engine/webrtcvoiceengine_unittest.cc" ]
+ }
+
+ if (rtc_enable_sctp) {
+ sources += [ "sctp/sctptransport_unittest.cc" ]
+ }
+
+ configs += [ ":rtc_media_unittests_config" ]
+
+ if (rtc_use_h264) {
+ defines += [ "WEBRTC_USE_H264" ]
+ }
+
+ if (rtc_opus_support_120ms_ptime) {
+ defines += [ "WEBRTC_OPUS_SUPPORT_120MS_PTIME=1" ]
+ } else {
+ defines += [ "WEBRTC_OPUS_SUPPORT_120MS_PTIME=0" ]
+ }
+
+ if (is_win) {
+ cflags = [
+ "/wd4245", # conversion from int to size_t, signed/unsigned mismatch.
+ "/wd4373", # virtual function override.
+ "/wd4389", # signed/unsigned mismatch.
+ ]
+ }
+
+ if (!build_with_chromium && is_clang) {
+ suppressed_configs += [
+ "//build/config/clang:extra_warnings",
+
+ # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+ "//build/config/clang:find_bad_constructs",
+ ]
+ }
+
+ data = rtc_media_unittests_resources
+
+ if (is_android) {
+ deps += [ "//testing/android/native_test:native_test_support" ]
+ shard_timeout = 900
+ }
+
+ if (is_ios) {
+ deps += [ ":rtc_media_unittests_bundle_data" ]
+ }
+
+ deps += [
+ ":rtc_media",
+ ":rtc_media_base",
+ ":rtc_media_tests_utils",
+ "../api:mock_video_codec_factory",
+ "../api:video_frame_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/video_codecs:video_codecs_api",
+ "../audio",
+ "../call:call_interfaces",
+ "../common_video:common_video",
+ "../logging:rtc_event_log_api",
+ "../modules/audio_device:mock_audio_device",
+ "../modules/audio_processing:audio_processing",
+ "../modules/video_coding:simulcast_test_utility",
+ "../modules/video_coding:video_coding_utility",
+ "../modules/video_coding:webrtc_vp8",
+ "../p2p:p2p_test_utils",
+ "../rtc_base:rtc_base",
+ "../rtc_base:rtc_base_approved",
+ "../rtc_base:rtc_base_tests_main",
+ "../rtc_base:rtc_base_tests_utils",
+ "../system_wrappers:metrics_default",
+ "../test:audio_codec_mocks",
+ "../test:test_support",
+ "../test:video_test_common",
+ "../voice_engine:voice_engine",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/webrtc/media/DEPS b/third_party/libwebrtc/webrtc/media/DEPS
new file mode 100644
index 0000000000..4dd449c003
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/DEPS
@@ -0,0 +1,25 @@
+include_rules = [
+ "+api",
+ "+call",
+ "+common_video",
+ "+logging/rtc_event_log",
+ "+modules/audio_coding",
+ "+modules/audio_device",
+ "+modules/audio_mixer",
+ "+modules/audio_processing",
+ "+modules/rtp_rtcp",
+ "+modules/video_capture",
+ "+modules/video_coding",
+ "+p2p",
+ "+pc",
+ "+sound",
+ "+system_wrappers",
+ "+voice_engine",
+ "+usrsctplib",
+]
+
+specific_include_rules = {
+ "win32devicemanager\.cc": [
+ "+third_party/logitech/files/logitechquickcam.h",
+ ],
+}
diff --git a/third_party/libwebrtc/webrtc/media/OWNERS b/third_party/libwebrtc/webrtc/media/OWNERS
new file mode 100644
index 0000000000..bf977f493e
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/OWNERS
@@ -0,0 +1,11 @@
+magjed@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+pthatcher@webrtc.org
+solenberg@webrtc.org
+deadbeef@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gn=*
+per-file *.gni=*
diff --git a/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc
new file mode 100644
index 0000000000..5a7168bfb8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/adaptedvideotracksource.h"
+
+#include "api/video/i420_buffer.h"
+
+namespace rtc {
+
+AdaptedVideoTrackSource::AdaptedVideoTrackSource() {
+ thread_checker_.DetachFromThread();
+}
+
+AdaptedVideoTrackSource::AdaptedVideoTrackSource(int required_alignment)
+ : video_adapter_(required_alignment) {
+ thread_checker_.DetachFromThread();
+}
+
+bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
+ rtc::CritScope lock(&stats_crit_);
+
+ if (!stats_) {
+ return false;
+ }
+
+ *stats = *stats_;
+ return true;
+}
+
+void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ frame.video_frame_buffer());
+ /* Note that this is a "best effort" approach to
+ wants.rotation_applied; apply_rotation_ can change from false to
+ true between the check of apply_rotation() and the call to
+ broadcaster_.OnFrame(), in which case we generate a frame with
+ pending rotation despite some sink with wants.rotation_applied ==
+ true was just added. The VideoBroadcaster enforces
+ synchronization for us in this case, by not passing the frame on
+ to sinks which don't want it. */
+ if (apply_rotation() && frame.rotation() != webrtc::kVideoRotation_0 &&
+ buffer->type() == webrtc::VideoFrameBuffer::Type::kI420) {
+ /* Apply pending rotation. */
+ broadcaster_.OnFrame(webrtc::VideoFrame(
+ webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
+ webrtc::kVideoRotation_0, frame.timestamp_us()));
+ } else {
+ broadcaster_.OnFrame(frame);
+ }
+}
+
+void AdaptedVideoTrackSource::AddOrUpdateSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void AdaptedVideoTrackSource::RemoveSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ broadcaster_.RemoveSink(sink);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+bool AdaptedVideoTrackSource::apply_rotation() {
+ return broadcaster_.wants().rotation_applied;
+}
+
+void AdaptedVideoTrackSource::OnSinkWantsChanged(
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ video_adapter_.OnResolutionFramerateRequest(
+ wants.target_pixel_count, wants.max_pixel_count, wants.max_framerate_fps);
+}
+
+bool AdaptedVideoTrackSource::AdaptFrame(int width,
+ int height,
+ int64_t time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y) {
+ {
+ rtc::CritScope lock(&stats_crit_);
+ stats_ = Stats{width, height};
+ }
+
+ if (!broadcaster_.frame_wanted()) {
+ return false;
+ }
+
+ if (!video_adapter_.AdaptFrameResolution(
+ width, height, time_us * rtc::kNumNanosecsPerMicrosec,
+ crop_width, crop_height, out_width, out_height)) {
+ broadcaster_.OnDiscardedFrame();
+ // VideoAdapter dropped the frame.
+ return false;
+ }
+
+ *crop_x = (width - *crop_width) / 2;
+ *crop_y = (height - *crop_height) / 2;
+ return true;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h
new file mode 100644
index 0000000000..0db381f5fb
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+#define MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+
+#include "api/mediastreaminterface.h"
+#include "api/notifier.h"
+#include "media/base/videoadapter.h"
+#include "media/base/videobroadcaster.h"
+
+namespace rtc {
+
+// Base class for sources which need video adaptation, e.g., video
+// capture sources. Sinks must be added and removed on one and only
+// one thread, while AdaptFrame and OnFrame may be called on any
+// thread.
+class AdaptedVideoTrackSource
+ : public webrtc::Notifier<webrtc::VideoTrackSourceInterface> {
+ public:
+ AdaptedVideoTrackSource();
+
+ protected:
+ // Allows derived classes to initialize |video_adapter_| with a custom
+ // alignment.
+ explicit AdaptedVideoTrackSource(int required_alignment);
+ // Checks the apply_rotation() flag. If the frame needs rotation, and it is a
+ // plain memory frame, it is rotated. Subclasses producing native frames must
+ // handle apply_rotation() themselves.
+ void OnFrame(const webrtc::VideoFrame& frame);
+
+ // Reports the appropriate frame size after adaptation. Returns true
+ // if a frame is wanted. Returns false if there are no interested
+ // sinks, or if the VideoAdapter decides to drop the frame.
+ bool AdaptFrame(int width,
+ int height,
+ int64_t time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y);
+
+ // Returns the current value of the apply_rotation flag, derived
+ // from the VideoSinkWants of registered sinks. The value is derived
+ // from sinks' wants, in AddOrUpdateSink and RemoveSink. Beware that
+ // when using this method from a different thread, the value may
+ // become stale before it is used.
+ bool apply_rotation();
+
+ cricket::VideoAdapter* video_adapter() { return &video_adapter_; }
+
+ private:
+ // Implements rtc::VideoSourceInterface.
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override;
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ // Part of VideoTrackSourceInterface.
+ bool GetStats(Stats* stats) override;
+
+ void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+
+ rtc::ThreadChecker thread_checker_;
+
+ cricket::VideoAdapter video_adapter_;
+
+ rtc::CriticalSection stats_crit_;
+ rtc::Optional<Stats> stats_ RTC_GUARDED_BY(stats_crit_);
+
+ VideoBroadcaster broadcaster_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/audiosource.h b/third_party/libwebrtc/webrtc/media/base/audiosource.h
new file mode 100644
index 0000000000..199b614850
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/audiosource.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_AUDIOSOURCE_H_
+#define MEDIA_BASE_AUDIOSOURCE_H_
+
+#include <cstddef>
+
+namespace cricket {
+
+// Abstract interface for providing the audio data.
+// TODO(deadbeef): Rename this to AudioSourceInterface, and rename
+// webrtc::AudioSourceInterface to AudioTrackSourceInterface.
+class AudioSource {
+ public:
+ class Sink {
+ public:
+ // Callback to receive data from the AudioSource.
+ virtual void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) = 0;
+
+ // Called when the AudioSource is going away.
+ virtual void OnClose() = 0;
+
+ protected:
+ virtual ~Sink() {}
+ };
+
+ // Sets a sink to the AudioSource. There can be only one sink connected
+ // to the source at a time.
+ virtual void SetSink(Sink* sink) = 0;
+
+ protected:
+ virtual ~AudioSource() {}
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_AUDIOSOURCE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/codec.cc b/third_party/libwebrtc/webrtc/media/base/codec.cc
new file mode 100644
index 0000000000..98e52d6848
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec.cc
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/codec.h"
+
+#include <algorithm>
+#include <sstream>
+
+#include "media/base/h264_profile_level_id.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/stringencode.h"
+#include "rtc_base/stringutils.h"
+
+namespace cricket {
+
+FeedbackParams::FeedbackParams() = default;
+
+bool FeedbackParam::operator==(const FeedbackParam& other) const {
+ return _stricmp(other.id().c_str(), id().c_str()) == 0 &&
+ _stricmp(other.param().c_str(), param().c_str()) == 0;
+}
+
+bool FeedbackParams::operator==(const FeedbackParams& other) const {
+ return params_ == other.params_;
+}
+
+bool FeedbackParams::Has(const FeedbackParam& param) const {
+ return std::find(params_.begin(), params_.end(), param) != params_.end();
+}
+
+void FeedbackParams::Add(const FeedbackParam& param) {
+ if (param.id().empty()) {
+ return;
+ }
+ if (Has(param)) {
+ // Param already in |this|.
+ return;
+ }
+ params_.push_back(param);
+ RTC_CHECK(!HasDuplicateEntries());
+}
+
+void FeedbackParams::Intersect(const FeedbackParams& from) {
+ std::vector<FeedbackParam>::iterator iter_to = params_.begin();
+ while (iter_to != params_.end()) {
+ if (!from.Has(*iter_to)) {
+ iter_to = params_.erase(iter_to);
+ } else {
+ ++iter_to;
+ }
+ }
+}
+
+bool FeedbackParams::HasDuplicateEntries() const {
+ for (std::vector<FeedbackParam>::const_iterator iter = params_.begin();
+ iter != params_.end(); ++iter) {
+ for (std::vector<FeedbackParam>::const_iterator found = iter + 1;
+ found != params_.end(); ++found) {
+ if (*found == *iter) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+Codec::Codec(int id, const std::string& name, int clockrate)
+ : id(id), name(name), clockrate(clockrate) {}
+
+Codec::Codec() : id(0), clockrate(0) {}
+
+Codec::Codec(const Codec& c) = default;
+Codec::Codec(Codec&& c) = default;
+Codec::~Codec() = default;
+Codec& Codec::operator=(const Codec& c) = default;
+Codec& Codec::operator=(Codec&& c) = default;
+
+bool Codec::operator==(const Codec& c) const {
+ return this->id == c.id && // id is reserved in objective-c
+ name == c.name && clockrate == c.clockrate && params == c.params &&
+ feedback_params == c.feedback_params;
+}
+
+bool Codec::Matches(const Codec& codec) const {
+ // Match the codec id/name based on the typical static/dynamic name rules.
+ // Matching is case-insensitive.
+ const int kMaxStaticPayloadId = 95;
+ return (id <= kMaxStaticPayloadId || codec.id <= kMaxStaticPayloadId)
+ ? (id == codec.id)
+ : (_stricmp(name.c_str(), codec.name.c_str()) == 0);
+}
+
+bool Codec::GetParam(const std::string& name, std::string* out) const {
+ CodecParameterMap::const_iterator iter = params.find(name);
+ if (iter == params.end())
+ return false;
+ *out = iter->second;
+ return true;
+}
+
+bool Codec::GetParam(const std::string& name, int* out) const {
+ CodecParameterMap::const_iterator iter = params.find(name);
+ if (iter == params.end())
+ return false;
+ return rtc::FromString(iter->second, out);
+}
+
+void Codec::SetParam(const std::string& name, const std::string& value) {
+ params[name] = value;
+}
+
+void Codec::SetParam(const std::string& name, int value) {
+ params[name] = rtc::ToString(value);
+}
+
+bool Codec::RemoveParam(const std::string& name) {
+ return params.erase(name) == 1;
+}
+
+void Codec::AddFeedbackParam(const FeedbackParam& param) {
+ feedback_params.Add(param);
+}
+
+bool Codec::HasFeedbackParam(const FeedbackParam& param) const {
+ return feedback_params.Has(param);
+}
+
+void Codec::IntersectFeedbackParams(const Codec& other) {
+ feedback_params.Intersect(other.feedback_params);
+}
+
+webrtc::RtpCodecParameters Codec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params;
+ codec_params.payload_type = id;
+ codec_params.name = name;
+ codec_params.clock_rate = clockrate;
+ return codec_params;
+}
+
+AudioCodec::AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels)
+ : Codec(id, name, clockrate), bitrate(bitrate), channels(channels) {}
+
+AudioCodec::AudioCodec() : Codec(), bitrate(0), channels(0) {
+}
+
+AudioCodec::AudioCodec(const AudioCodec& c) = default;
+AudioCodec::AudioCodec(AudioCodec&& c) = default;
+AudioCodec& AudioCodec::operator=(const AudioCodec& c) = default;
+AudioCodec& AudioCodec::operator=(AudioCodec&& c) = default;
+
+bool AudioCodec::operator==(const AudioCodec& c) const {
+ return bitrate == c.bitrate && channels == c.channels && Codec::operator==(c);
+}
+
+bool AudioCodec::Matches(const AudioCodec& codec) const {
+ // If a nonzero clockrate is specified, it must match the actual clockrate.
+ // If a nonzero bitrate is specified, it must match the actual bitrate,
+ // unless the codec is VBR (0), where we just force the supplied value.
+ // The number of channels must match exactly, with the exception
+ // that channels=0 is treated synonymously as channels=1, per RFC
+ // 4566 section 6: " [The channels] parameter is OPTIONAL and may be
+ // omitted if the number of channels is one."
+ // Preference is ignored.
+ // TODO(juberti): Treat a zero clockrate as 8000Hz, the RTP default clockrate.
+ return Codec::Matches(codec) &&
+ ((codec.clockrate == 0 /*&& clockrate == 8000*/) ||
+ clockrate == codec.clockrate) &&
+ (codec.bitrate == 0 || bitrate <= 0 || bitrate == codec.bitrate) &&
+ ((codec.channels < 2 && channels < 2) || channels == codec.channels);
+}
+
+std::string AudioCodec::ToString() const {
+ std::ostringstream os;
+ os << "AudioCodec[" << id << ":" << name << ":" << clockrate << ":" << bitrate
+ << ":" << channels << "]";
+ return os.str();
+}
+
+webrtc::RtpCodecParameters AudioCodec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params = Codec::ToCodecParameters();
+ codec_params.num_channels = static_cast<int>(channels);
+ codec_params.kind = MEDIA_TYPE_AUDIO;
+ return codec_params;
+}
+
+std::string VideoCodec::ToString() const {
+ std::ostringstream os;
+ os << "VideoCodec[" << id << ":" << name << "]";
+ return os.str();
+}
+
+webrtc::RtpCodecParameters VideoCodec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params = Codec::ToCodecParameters();
+ codec_params.kind = MEDIA_TYPE_VIDEO;
+ return codec_params;
+}
+
+VideoCodec::VideoCodec(int id, const std::string& name)
+ : Codec(id, name, kVideoCodecClockrate) {
+ SetDefaultParameters();
+}
+
+VideoCodec::VideoCodec(const std::string& name) : VideoCodec(0 /* id */, name) {
+ SetDefaultParameters();
+}
+
+VideoCodec::VideoCodec() : Codec() {
+ clockrate = kVideoCodecClockrate;
+}
+
+VideoCodec::VideoCodec(const webrtc::SdpVideoFormat& c)
+ : Codec(0 /* id */, c.name, kVideoCodecClockrate) {
+ params = c.parameters;
+}
+
+VideoCodec::VideoCodec(const VideoCodec& c) = default;
+VideoCodec::VideoCodec(VideoCodec&& c) = default;
+VideoCodec& VideoCodec::operator=(const VideoCodec& c) = default;
+VideoCodec& VideoCodec::operator=(VideoCodec&& c) = default;
+
+void VideoCodec::SetDefaultParameters() {
+ if (_stricmp(kH264CodecName, name.c_str()) == 0) {
+ // This default is set for all H.264 codecs created because
+ // that was the default before packetization mode support was added.
+ // TODO(hta): Move this to the places that create VideoCodecs from
+ // SDP or from knowledge of implementation capabilities.
+ SetParam(kH264FmtpPacketizationMode, "1");
+ }
+}
+
+bool VideoCodec::operator==(const VideoCodec& c) const {
+ return Codec::operator==(c);
+}
+
+bool VideoCodec::Matches(const VideoCodec& other) const {
+ if (!Codec::Matches(other))
+ return false;
+ if (CodecNamesEq(name.c_str(), kH264CodecName))
+ return webrtc::H264::IsSameH264Profile(params, other.params);
+ return true;
+}
+
+VideoCodec VideoCodec::CreateRtxCodec(int rtx_payload_type,
+ int associated_payload_type) {
+ VideoCodec rtx_codec(rtx_payload_type, kRtxCodecName);
+ rtx_codec.SetParam(kCodecParamAssociatedPayloadType, associated_payload_type);
+ return rtx_codec;
+}
+
+VideoCodec::CodecType VideoCodec::GetCodecType() const {
+ const char* payload_name = name.c_str();
+ if (_stricmp(payload_name, kRedCodecName) == 0) {
+ return CODEC_RED;
+ }
+ if (_stricmp(payload_name, kUlpfecCodecName) == 0) {
+ return CODEC_ULPFEC;
+ }
+ if (_stricmp(payload_name, kFlexfecCodecName) == 0) {
+ return CODEC_FLEXFEC;
+ }
+ if (_stricmp(payload_name, kRtxCodecName) == 0) {
+ return CODEC_RTX;
+ }
+
+ return CODEC_VIDEO;
+}
+
+bool VideoCodec::ValidateCodecFormat() const {
+ if (id < 0 || id > 127) {
+ RTC_LOG(LS_ERROR) << "Codec with invalid payload type: " << ToString();
+ return false;
+ }
+ if (GetCodecType() != CODEC_VIDEO) {
+ return true;
+ }
+
+ // Video validation from here on.
+ int min_bitrate = -1;
+ int max_bitrate = -1;
+ if (GetParam(kCodecParamMinBitrate, &min_bitrate) &&
+ GetParam(kCodecParamMaxBitrate, &max_bitrate)) {
+ if (max_bitrate < min_bitrate) {
+ RTC_LOG(LS_ERROR) << "Codec with max < min bitrate: " << ToString();
+ return false;
+ }
+ }
+ return true;
+}
+
+DataCodec::DataCodec(int id, const std::string& name)
+ : Codec(id, name, kDataCodecClockrate) {}
+
+DataCodec::DataCodec() : Codec() {
+ clockrate = kDataCodecClockrate;
+}
+
+DataCodec::DataCodec(const DataCodec& c) = default;
+DataCodec::DataCodec(DataCodec&& c) = default;
+DataCodec& DataCodec::operator=(const DataCodec& c) = default;
+DataCodec& DataCodec::operator=(DataCodec&& c) = default;
+
+std::string DataCodec::ToString() const {
+ std::ostringstream os;
+ os << "DataCodec[" << id << ":" << name << "]";
+ return os.str();
+}
+
+bool HasNack(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
+}
+
+bool HasRemb(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+}
+
+bool HasTransportCc(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
+}
+
+bool CodecNamesEq(const std::string& name1, const std::string& name2) {
+ return CodecNamesEq(name1.c_str(), name2.c_str());
+}
+
+bool CodecNamesEq(const char* name1, const char* name2) {
+ return _stricmp(name1, name2) == 0;
+}
+
+const VideoCodec* FindMatchingCodec(
+ const std::vector<VideoCodec>& supported_codecs,
+ const VideoCodec& codec) {
+ for (const VideoCodec& supported_codec : supported_codecs) {
+ if (IsSameCodec(codec.name, codec.params, supported_codec.name,
+ supported_codec.params)) {
+ return &supported_codec;
+ }
+ }
+ return nullptr;
+}
+
+bool IsSameCodec(const std::string& name1,
+ const CodecParameterMap& params1,
+ const std::string& name2,
+ const CodecParameterMap& params2) {
+ // If different names (case insensitive), then not same formats.
+ if (!CodecNamesEq(name1, name2))
+ return false;
+ // For every format besides H264, comparing names is enough.
+ return !CodecNamesEq(name1.c_str(), kH264CodecName) ||
+ webrtc::H264::IsSameH264Profile(params1, params2);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/codec.h b/third_party/libwebrtc/webrtc/media/base/codec.h
new file mode 100644
index 0000000000..6a2dcf4529
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec.h
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_CODEC_H_
+#define MEDIA_BASE_CODEC_H_
+
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/rtpparameters.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "common_types.h" // NOLINT(build/include)
+#include "media/base/mediaconstants.h"
+
+namespace cricket {
+
+typedef std::map<std::string, std::string> CodecParameterMap;
+
+class FeedbackParam {
+ public:
+ FeedbackParam() = default;
+ FeedbackParam(const std::string& id, const std::string& param)
+ : id_(id),
+ param_(param) {
+ }
+ explicit FeedbackParam(const std::string& id)
+ : id_(id),
+ param_(kParamValueEmpty) {
+ }
+ bool operator==(const FeedbackParam& other) const;
+
+ const std::string& id() const { return id_; }
+ const std::string& param() const { return param_; }
+
+ private:
+ std::string id_; // e.g. "nack", "ccm"
+ std::string param_; // e.g. "", "rpsi", "fir"
+};
+
+class FeedbackParams {
+ public:
+ FeedbackParams();
+ bool operator==(const FeedbackParams& other) const;
+
+ bool Has(const FeedbackParam& param) const;
+ void Add(const FeedbackParam& param);
+
+ void Intersect(const FeedbackParams& from);
+
+ const std::vector<FeedbackParam>& params() const { return params_; }
+ private:
+ bool HasDuplicateEntries() const;
+
+ std::vector<FeedbackParam> params_;
+};
+
+struct Codec {
+ int id;
+ std::string name;
+ int clockrate;
+ CodecParameterMap params;
+ FeedbackParams feedback_params;
+
+ virtual ~Codec();
+
+ // Indicates if this codec is compatible with the specified codec.
+ bool Matches(const Codec& codec) const;
+
+ // Find the parameter for |name| and write the value to |out|.
+ bool GetParam(const std::string& name, std::string* out) const;
+ bool GetParam(const std::string& name, int* out) const;
+
+ void SetParam(const std::string& name, const std::string& value);
+ void SetParam(const std::string& name, int value);
+
+ // It is safe to input a non-existent parameter.
+ // Returns true if the parameter existed, false if it did not exist.
+ bool RemoveParam(const std::string& name);
+
+ bool HasFeedbackParam(const FeedbackParam& param) const;
+ void AddFeedbackParam(const FeedbackParam& param);
+
+  // Filter |this| feedback params such that only those shared by both |this|
+ // and |other| are kept.
+ void IntersectFeedbackParams(const Codec& other);
+
+ virtual webrtc::RtpCodecParameters ToCodecParameters() const;
+
+ Codec& operator=(const Codec& c);
+ Codec& operator=(Codec&& c);
+
+ bool operator==(const Codec& c) const;
+
+ bool operator!=(const Codec& c) const {
+ return !(*this == c);
+ }
+
+ protected:
+ // A Codec can't be created without a subclass.
+ // Creates a codec with the given parameters.
+ Codec(int id, const std::string& name, int clockrate);
+ // Creates an empty codec.
+ Codec();
+ Codec(const Codec& c);
+ Codec(Codec&& c);
+};
+
+struct AudioCodec : public Codec {
+ int bitrate;
+ size_t channels;
+
+ // Creates a codec with the given parameters.
+ AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels);
+ // Creates an empty codec.
+ AudioCodec();
+ AudioCodec(const AudioCodec& c);
+ AudioCodec(AudioCodec&& c);
+ ~AudioCodec() override = default;
+
+ // Indicates if this codec is compatible with the specified codec.
+ bool Matches(const AudioCodec& codec) const;
+
+ std::string ToString() const;
+
+ webrtc::RtpCodecParameters ToCodecParameters() const override;
+
+ AudioCodec& operator=(const AudioCodec& c);
+ AudioCodec& operator=(AudioCodec&& c);
+
+ bool operator==(const AudioCodec& c) const;
+
+ bool operator!=(const AudioCodec& c) const {
+ return !(*this == c);
+ }
+};
+
+inline std::ostream& operator<<(std::ostream& os, const AudioCodec& ac) {
+ os << "{id: " << ac.id;
+ os << ", name: " << ac.name;
+ os << ", clockrate: " << ac.clockrate;
+ os << ", bitrate: " << ac.bitrate;
+ os << ", channels: " << ac.channels;
+ os << ", params: {";
+ const char* sep = "";
+ for (const auto& kv : ac.params) {
+ os << sep << kv.first << ": " << kv.second;
+ sep = ", ";
+ }
+ os << "}, feedback_params: {";
+ sep = "";
+ for (const FeedbackParam& fp : ac.feedback_params.params()) {
+ os << sep << fp.id() << ": " << fp.param();
+ sep = ", ";
+ }
+ os << "}}";
+ return os;
+}
+
+struct VideoCodec : public Codec {
+ // Creates a codec with the given parameters.
+ VideoCodec(int id, const std::string& name);
+ // Creates a codec with the given name and empty id.
+ explicit VideoCodec(const std::string& name);
+ // Creates an empty codec.
+ VideoCodec();
+ VideoCodec(const VideoCodec& c);
+ explicit VideoCodec(const webrtc::SdpVideoFormat& c);
+ VideoCodec(VideoCodec&& c);
+ ~VideoCodec() override = default;
+
+ // Indicates if this video codec is the same as the other video codec, e.g. if
+ // they are both VP8 or VP9, or if they are both H264 with the same H264
+ // profile. H264 levels however are not compared.
+ bool Matches(const VideoCodec& codec) const;
+
+ std::string ToString() const;
+
+ webrtc::RtpCodecParameters ToCodecParameters() const override;
+
+ VideoCodec& operator=(const VideoCodec& c);
+ VideoCodec& operator=(VideoCodec&& c);
+
+ bool operator==(const VideoCodec& c) const;
+
+ bool operator!=(const VideoCodec& c) const {
+ return !(*this == c);
+ }
+
+ static VideoCodec CreateRtxCodec(int rtx_payload_type,
+ int associated_payload_type);
+
+ enum CodecType {
+ CODEC_VIDEO,
+ CODEC_RED,
+ CODEC_ULPFEC,
+ CODEC_FLEXFEC,
+ CODEC_RTX,
+ };
+
+ CodecType GetCodecType() const;
+ // Validates a VideoCodec's payload type, dimensions and bitrates etc. If they
+  // don't make sense (such as max < min bitrate), an error is logged and
+ // ValidateCodecFormat returns false.
+ bool ValidateCodecFormat() const;
+
+ private:
+ void SetDefaultParameters();
+};
+
+struct DataCodec : public Codec {
+ DataCodec(int id, const std::string& name);
+ DataCodec();
+ DataCodec(const DataCodec& c);
+ DataCodec(DataCodec&& c);
+ ~DataCodec() override = default;
+
+ DataCodec& operator=(const DataCodec& c);
+ DataCodec& operator=(DataCodec&& c);
+
+ std::string ToString() const;
+};
+
+// Get the codec setting associated with |payload_type|. If there
+// is no codec associated with that payload type it returns nullptr.
+template <class Codec>
+const Codec* FindCodecById(const std::vector<Codec>& codecs, int payload_type) {
+ for (const auto& codec : codecs) {
+ if (codec.id == payload_type)
+ return &codec;
+ }
+ return nullptr;
+}
+
+bool CodecNamesEq(const std::string& name1, const std::string& name2);
+bool CodecNamesEq(const char* name1, const char* name2);
+bool HasNack(const Codec& codec);
+bool HasRemb(const Codec& codec);
+bool HasTransportCc(const Codec& codec);
+// Returns the first codec in |supported_codecs| that matches |codec|, or
+// nullptr if no codec matches.
+const VideoCodec* FindMatchingCodec(
+ const std::vector<VideoCodec>& supported_codecs,
+ const VideoCodec& codec);
+bool IsSameCodec(const std::string& name1,
+ const CodecParameterMap& params1,
+ const std::string& name2,
+ const CodecParameterMap& params2);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_CODEC_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc b/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc
new file mode 100644
index 0000000000..03d8684c64
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc
@@ -0,0 +1,326 @@
+/*
+ * Copyright (c) 2009 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/codec.h"
+#include "rtc_base/gunit.h"
+
+using cricket::AudioCodec;
+using cricket::Codec;
+using cricket::DataCodec;
+using cricket::FeedbackParam;
+using cricket::VideoCodec;
+using cricket::kCodecParamAssociatedPayloadType;
+using cricket::kCodecParamMaxBitrate;
+using cricket::kCodecParamMinBitrate;
+
+class TestCodec : public Codec {
+ public:
+ TestCodec(int id, const std::string& name, int clockrate)
+ : Codec(id, name, clockrate) {}
+ TestCodec() : Codec() {}
+ TestCodec(const TestCodec& c) : Codec(c) {}
+};
+
+TEST(CodecTest, TestCodecOperators) {
+ TestCodec c0(96, "D", 1000);
+ c0.SetParam("a", 1);
+
+ TestCodec c1 = c0;
+ EXPECT_TRUE(c1 == c0);
+
+ int param_value0;
+ int param_value1;
+ EXPECT_TRUE(c0.GetParam("a", &param_value0));
+ EXPECT_TRUE(c1.GetParam("a", &param_value1));
+ EXPECT_EQ(param_value0, param_value1);
+
+ c1.id = 86;
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.name = "x";
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.clockrate = 2000;
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.SetParam("a", 2);
+ EXPECT_TRUE(c0 != c1);
+
+ TestCodec c5;
+ TestCodec c6(0, "", 0);
+ EXPECT_TRUE(c5 == c6);
+}
+
+TEST(CodecTest, TestAudioCodecOperators) {
+ AudioCodec c0(96, "A", 44100, 20000, 2);
+ AudioCodec c1(95, "A", 44100, 20000, 2);
+ AudioCodec c2(96, "x", 44100, 20000, 2);
+ AudioCodec c3(96, "A", 48000, 20000, 2);
+ AudioCodec c4(96, "A", 44100, 10000, 2);
+ AudioCodec c5(96, "A", 44100, 20000, 1);
+ EXPECT_NE(c0, c1);
+ EXPECT_NE(c0, c2);
+ EXPECT_NE(c0, c3);
+ EXPECT_NE(c0, c4);
+ EXPECT_NE(c0, c5);
+
+ AudioCodec c7;
+ AudioCodec c8(0, "", 0, 0, 0);
+ AudioCodec c9 = c0;
+ EXPECT_EQ(c8, c7);
+ EXPECT_NE(c9, c7);
+ EXPECT_EQ(c9, c0);
+
+ AudioCodec c10(c0);
+ AudioCodec c11(c0);
+ AudioCodec c12(c0);
+ AudioCodec c13(c0);
+ c10.params["x"] = "abc";
+ c11.params["x"] = "def";
+ c12.params["y"] = "abc";
+ c13.params["x"] = "abc";
+ EXPECT_NE(c10, c0);
+ EXPECT_NE(c11, c0);
+ EXPECT_NE(c11, c10);
+ EXPECT_NE(c12, c0);
+ EXPECT_NE(c12, c10);
+ EXPECT_NE(c12, c11);
+ EXPECT_EQ(c13, c10);
+}
+
+TEST(CodecTest, TestAudioCodecMatches) {
+ // Test a codec with a static payload type.
+ AudioCodec c0(95, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 1)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 0, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 0, 0, 0)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(96, "", 44100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 30000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 30000, 2)));
+
+ // Test a codec with a dynamic payload type.
+ AudioCodec c1(96, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "a", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "a", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(95, "A", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "A", 55100, 30000, 1)));
+
+ // Test a codec with a dynamic payload type, and auto bitrate.
+ AudioCodec c2(97, "A", 16000, 0, 1);
+ // Use default bitrate.
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 1)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 0)));
+ // Use explicit bitrate.
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 32000, 1)));
+ // Backward compatibility with clients that might send "-1" (for default).
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, -1, 1)));
+
+ // Stereo doesn't match channels = 0.
+ AudioCodec c3(96, "A", 44100, 20000, 2);
+ EXPECT_TRUE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 2)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 1)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 0)));
+}
+
+TEST(CodecTest, TestVideoCodecOperators) {
+ VideoCodec c0(96, "V");
+ VideoCodec c1(95, "V");
+ VideoCodec c2(96, "x");
+
+ EXPECT_TRUE(c0 != c1);
+ EXPECT_TRUE(c0 != c2);
+
+ VideoCodec c7;
+ VideoCodec c8(0, "");
+ VideoCodec c9 = c0;
+ EXPECT_TRUE(c8 == c7);
+ EXPECT_TRUE(c9 != c7);
+ EXPECT_TRUE(c9 == c0);
+
+ VideoCodec c10(c0);
+ VideoCodec c11(c0);
+ VideoCodec c12(c0);
+ VideoCodec c13(c0);
+ c10.params["x"] = "abc";
+ c11.params["x"] = "def";
+ c12.params["y"] = "abc";
+ c13.params["x"] = "abc";
+ EXPECT_TRUE(c10 != c0);
+ EXPECT_TRUE(c11 != c0);
+ EXPECT_TRUE(c11 != c10);
+ EXPECT_TRUE(c12 != c0);
+ EXPECT_TRUE(c12 != c10);
+ EXPECT_TRUE(c12 != c11);
+ EXPECT_TRUE(c13 == c10);
+}
+
+TEST(CodecTest, TestVideoCodecMatches) {
+ // Test a codec with a static payload type.
+ VideoCodec c0(95, "V");
+ EXPECT_TRUE(c0.Matches(VideoCodec(95, "")));
+ EXPECT_FALSE(c0.Matches(VideoCodec(96, "")));
+
+ // Test a codec with a dynamic payload type.
+ VideoCodec c1(96, "V");
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "V")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "V")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "v")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "v")));
+ EXPECT_FALSE(c1.Matches(VideoCodec(96, "")));
+ EXPECT_FALSE(c1.Matches(VideoCodec(95, "V")));
+}
+
+TEST(CodecTest, TestDataCodecMatches) {
+ // Test a codec with a static payload type.
+ DataCodec c0(95, "D");
+ EXPECT_TRUE(c0.Matches(DataCodec(95, "")));
+ EXPECT_FALSE(c0.Matches(DataCodec(96, "")));
+
+ // Test a codec with a dynamic payload type.
+ DataCodec c1(96, "D");
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "d")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "d")));
+ EXPECT_FALSE(c1.Matches(DataCodec(96, "")));
+ EXPECT_FALSE(c1.Matches(DataCodec(95, "D")));
+}
+
+TEST(CodecTest, TestSetParamGetParamAndRemoveParam) {
+ AudioCodec codec;
+ codec.SetParam("a", "1");
+ codec.SetParam("b", "x");
+
+ int int_value = 0;
+ EXPECT_TRUE(codec.GetParam("a", &int_value));
+ EXPECT_EQ(1, int_value);
+ EXPECT_FALSE(codec.GetParam("b", &int_value));
+ EXPECT_FALSE(codec.GetParam("c", &int_value));
+
+ std::string str_value;
+ EXPECT_TRUE(codec.GetParam("a", &str_value));
+ EXPECT_EQ("1", str_value);
+ EXPECT_TRUE(codec.GetParam("b", &str_value));
+ EXPECT_EQ("x", str_value);
+ EXPECT_FALSE(codec.GetParam("c", &str_value));
+ EXPECT_TRUE(codec.RemoveParam("a"));
+ EXPECT_FALSE(codec.RemoveParam("c"));
+}
+
+TEST(CodecTest, TestIntersectFeedbackParams) {
+ const FeedbackParam a1("a", "1");
+ const FeedbackParam b2("b", "2");
+ const FeedbackParam b3("b", "3");
+ const FeedbackParam c3("c", "3");
+ TestCodec c1;
+ c1.AddFeedbackParam(a1); // Only match with c2.
+ c1.AddFeedbackParam(b2); // Same param different values.
+ c1.AddFeedbackParam(c3); // Not in c2.
+ TestCodec c2;
+ c2.AddFeedbackParam(a1);
+ c2.AddFeedbackParam(b3);
+
+ c1.IntersectFeedbackParams(c2);
+ EXPECT_TRUE(c1.HasFeedbackParam(a1));
+ EXPECT_FALSE(c1.HasFeedbackParam(b2));
+ EXPECT_FALSE(c1.HasFeedbackParam(c3));
+}
+
+TEST(CodecTest, TestGetCodecType) {
+  // Codec type comparison should be case insensitive on names.
+ const VideoCodec codec(96, "V");
+ const VideoCodec rtx_codec(96, "rTx");
+ const VideoCodec ulpfec_codec(96, "ulpFeC");
+ const VideoCodec flexfec_codec(96, "FlExFeC-03");
+ const VideoCodec red_codec(96, "ReD");
+ EXPECT_EQ(VideoCodec::CODEC_VIDEO, codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_RTX, rtx_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_ULPFEC, ulpfec_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_FLEXFEC, flexfec_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_RED, red_codec.GetCodecType());
+}
+
+TEST(CodecTest, TestCreateRtxCodec) {
+ VideoCodec rtx_codec = VideoCodec::CreateRtxCodec(96, 120);
+ EXPECT_EQ(96, rtx_codec.id);
+ EXPECT_EQ(VideoCodec::CODEC_RTX, rtx_codec.GetCodecType());
+ int associated_payload_type;
+ ASSERT_TRUE(rtx_codec.GetParam(kCodecParamAssociatedPayloadType,
+ &associated_payload_type));
+ EXPECT_EQ(120, associated_payload_type);
+}
+
+TEST(CodecTest, TestValidateCodecFormat) {
+ const VideoCodec codec(96, "V");
+ ASSERT_TRUE(codec.ValidateCodecFormat());
+
+ // Accept 0-127 as payload types.
+ VideoCodec low_payload_type = codec;
+ low_payload_type.id = 0;
+ VideoCodec high_payload_type = codec;
+ high_payload_type.id = 127;
+ ASSERT_TRUE(low_payload_type.ValidateCodecFormat());
+ EXPECT_TRUE(high_payload_type.ValidateCodecFormat());
+
+ // Reject negative payloads.
+ VideoCodec negative_payload_type = codec;
+ negative_payload_type.id = -1;
+ EXPECT_FALSE(negative_payload_type.ValidateCodecFormat());
+
+ // Reject too-high payloads.
+ VideoCodec too_high_payload_type = codec;
+ too_high_payload_type.id = 128;
+ EXPECT_FALSE(too_high_payload_type.ValidateCodecFormat());
+
+ // Reject codecs with min bitrate > max bitrate.
+ VideoCodec incorrect_bitrates = codec;
+ incorrect_bitrates.params[kCodecParamMinBitrate] = "100";
+ incorrect_bitrates.params[kCodecParamMaxBitrate] = "80";
+ EXPECT_FALSE(incorrect_bitrates.ValidateCodecFormat());
+
+ // Accept min bitrate == max bitrate.
+ VideoCodec equal_bitrates = codec;
+ equal_bitrates.params[kCodecParamMinBitrate] = "100";
+ equal_bitrates.params[kCodecParamMaxBitrate] = "100";
+ EXPECT_TRUE(equal_bitrates.ValidateCodecFormat());
+
+ // Accept min bitrate < max bitrate.
+ VideoCodec different_bitrates = codec;
+ different_bitrates.params[kCodecParamMinBitrate] = "99";
+ different_bitrates.params[kCodecParamMaxBitrate] = "100";
+ EXPECT_TRUE(different_bitrates.ValidateCodecFormat());
+}
+
+TEST(CodecTest, TestToCodecParameters) {
+ const VideoCodec v(96, "V");
+ webrtc::RtpCodecParameters codec_params_1 = v.ToCodecParameters();
+ EXPECT_EQ(96, codec_params_1.payload_type);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec_params_1.kind);
+ EXPECT_EQ("V", codec_params_1.name);
+ EXPECT_EQ(cricket::kVideoCodecClockrate, codec_params_1.clock_rate);
+ EXPECT_EQ(rtc::nullopt, codec_params_1.num_channels);
+
+ const AudioCodec a(97, "A", 44100, 20000, 2);
+ webrtc::RtpCodecParameters codec_params_2 = a.ToCodecParameters();
+ EXPECT_EQ(97, codec_params_2.payload_type);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec_params_2.kind);
+ EXPECT_EQ("A", codec_params_2.name);
+ EXPECT_EQ(44100, codec_params_2.clock_rate);
+ EXPECT_EQ(2, codec_params_2.num_channels);
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/cryptoparams.h b/third_party/libwebrtc/webrtc/media/base/cryptoparams.h
new file mode 100644
index 0000000000..9ba17eebba
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/cryptoparams.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(bugs.webrtc.org/7504): Remove.
+#ifndef MEDIA_BASE_CRYPTOPARAMS_H_
+#define MEDIA_BASE_CRYPTOPARAMS_H_
+
+#include "api/cryptoparams.h"
+
+#endif // MEDIA_BASE_CRYPTOPARAMS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/device.h b/third_party/libwebrtc/webrtc/media/base/device.h
new file mode 100644
index 0000000000..f47293823f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/device.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_DEVICE_H_
+#define MEDIA_BASE_DEVICE_H_
+
+#include <string>
+
+#include "rtc_base/stringencode.h"
+
+namespace cricket {
+
+// Used to represent an audio or video capture or render device.
+struct Device {
+  Device() {}
+  // Convenience constructor: stores the numeric |id| as its decimal string
+  // representation.
+  Device(const std::string& name, int id)
+      : name(name),
+        id(rtc::ToString(id)) {
+  }
+  Device(const std::string& name, const std::string& id)
+      : name(name), id(id) {}
+
+  std::string name;  // Human-readable device name.
+  std::string id;    // Platform-specific device identifier.
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_DEVICE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h b/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h
new file mode 100644
index 0000000000..38458f2e08
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h
@@ -0,0 +1,978 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEMEDIAENGINE_H_
+#define MEDIA_BASE_FAKEMEDIAENGINE_H_
+
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "api/call/audio_sink.h"
+#include "media/base/audiosource.h"
+#include "media/base/mediaengine.h"
+#include "media/base/rtputils.h"
+#include "media/base/streamparams.h"
+#include "media/engine/webrtcvideoengine.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/sessiondescription.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/ptr_util.h"
+#include "rtc_base/stringutils.h"
+
+using webrtc::RtpExtension;
+
+namespace cricket {
+
+class FakeMediaEngine;
+class FakeVideoEngine;
+class FakeVoiceEngine;
+
+// A common helper class that handles sending and receiving RTP/RTCP packets.
+// It records all sent/received packets as raw strings so tests can inspect
+// them, and tracks per-SSRC RtpParameters, send/receive stream lists, mute
+// state and network-route information for the fake media channels below.
+template <class Base> class RtpHelper : public Base {
+ public:
+  RtpHelper()
+      : sending_(false),
+        playout_(false),
+        fail_set_send_codecs_(false),
+        fail_set_recv_codecs_(false),
+        send_ssrc_(0),
+        ready_to_send_(false),
+        transport_overhead_per_packet_(0),
+        num_network_route_changes_(0) {}
+  virtual ~RtpHelper() = default;
+  const std::vector<RtpExtension>& recv_extensions() {
+    return recv_extensions_;
+  }
+  const std::vector<RtpExtension>& send_extensions() {
+    return send_extensions_;
+  }
+  bool sending() const { return sending_; }
+  bool playout() const { return playout_; }
+  const std::list<std::string>& rtp_packets() const { return rtp_packets_; }
+  const std::list<std::string>& rtcp_packets() const { return rtcp_packets_; }
+
+  // Forwards an RTP packet to the attached transport; fails when the channel
+  // is not in the sending state.
+  bool SendRtp(const void* data,
+               size_t len,
+               const rtc::PacketOptions& options) {
+    if (!sending_) {
+      return false;
+    }
+    rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len,
+                                  kMaxRtpPacketLen);
+    return Base::SendPacket(&packet, options);
+  }
+  // RTCP can be sent regardless of the sending state.
+  bool SendRtcp(const void* data, size_t len) {
+    rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len,
+                                  kMaxRtpPacketLen);
+    return Base::SendRtcp(&packet, rtc::PacketOptions());
+  }
+
+  // Pops the oldest received RTP packet and compares it byte-for-byte
+  // against |data|/|len|.
+  bool CheckRtp(const void* data, size_t len) {
+    bool success = !rtp_packets_.empty();
+    if (success) {
+      std::string packet = rtp_packets_.front();
+      rtp_packets_.pop_front();
+      success = (packet == std::string(static_cast<const char*>(data), len));
+    }
+    return success;
+  }
+  // Same as CheckRtp, for the RTCP queue.
+  bool CheckRtcp(const void* data, size_t len) {
+    bool success = !rtcp_packets_.empty();
+    if (success) {
+      std::string packet = rtcp_packets_.front();
+      rtcp_packets_.pop_front();
+      success = (packet == std::string(static_cast<const char*>(data), len));
+    }
+    return success;
+  }
+  bool CheckNoRtp() { return rtp_packets_.empty(); }
+  bool CheckNoRtcp() { return rtcp_packets_.empty(); }
+  void set_fail_set_send_codecs(bool fail) { fail_set_send_codecs_ = fail; }
+  void set_fail_set_recv_codecs(bool fail) { fail_set_recv_codecs_ = fail; }
+  // Rejects duplicate streams; otherwise registers the stream and seeds its
+  // RtpParameters with a single default encoding.
+  virtual bool AddSendStream(const StreamParams& sp) {
+    if (std::find(send_streams_.begin(), send_streams_.end(), sp) !=
+        send_streams_.end()) {
+      return false;
+    }
+    send_streams_.push_back(sp);
+    rtp_send_parameters_[sp.first_ssrc()] =
+        CreateRtpParametersWithOneEncoding();
+    return true;
+  }
+  virtual bool RemoveSendStream(uint32_t ssrc) {
+    auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_send_parameters_.end()) {
+      rtp_send_parameters_.erase(parameters_iterator);
+    }
+    return RemoveStreamBySsrc(&send_streams_, ssrc);
+  }
+  virtual bool AddRecvStream(const StreamParams& sp) {
+    if (std::find(receive_streams_.begin(), receive_streams_.end(), sp) !=
+        receive_streams_.end()) {
+      return false;
+    }
+    receive_streams_.push_back(sp);
+    rtp_receive_parameters_[sp.first_ssrc()] =
+        CreateRtpParametersWithOneEncoding();
+    return true;
+  }
+  virtual bool RemoveRecvStream(uint32_t ssrc) {
+    auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_receive_parameters_.end()) {
+      rtp_receive_parameters_.erase(parameters_iterator);
+    }
+    return RemoveStreamBySsrc(&receive_streams_, ssrc);
+  }
+
+  // Returns default-constructed RtpParameters for unknown SSRCs.
+  virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const {
+    auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_send_parameters_.end()) {
+      return parameters_iterator->second;
+    }
+    return webrtc::RtpParameters();
+  }
+  virtual bool SetRtpSendParameters(uint32_t ssrc,
+                                    const webrtc::RtpParameters& parameters) {
+    auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_send_parameters_.end()) {
+      parameters_iterator->second = parameters;
+      return true;
+    }
+    // Replicate the behavior of the real media channel: return false
+    // when setting parameters for unknown SSRCs.
+    return false;
+  }
+
+  virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const {
+    auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_receive_parameters_.end()) {
+      return parameters_iterator->second;
+    }
+    return webrtc::RtpParameters();
+  }
+  virtual bool SetRtpReceiveParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) {
+    auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+    if (parameters_iterator != rtp_receive_parameters_.end()) {
+      parameters_iterator->second = parameters;
+      return true;
+    }
+    // Replicate the behavior of the real media channel: return false
+    // when setting parameters for unknown SSRCs.
+    return false;
+  }
+
+  bool IsStreamMuted(uint32_t ssrc) const {
+    bool ret = muted_streams_.find(ssrc) != muted_streams_.end();
+    // If |ssrc| is 0, fall back to checking whether the first send stream
+    // is muted.
+    if (!ret && ssrc == 0 && !send_streams_.empty()) {
+      return muted_streams_.find(send_streams_[0].first_ssrc()) !=
+             muted_streams_.end();
+    }
+    return ret;
+  }
+  const std::vector<StreamParams>& send_streams() const {
+    return send_streams_;
+  }
+  const std::vector<StreamParams>& recv_streams() const {
+    return receive_streams_;
+  }
+  bool HasRecvStream(uint32_t ssrc) const {
+    return GetStreamBySsrc(receive_streams_, ssrc) != nullptr;
+  }
+  bool HasSendStream(uint32_t ssrc) const {
+    return GetStreamBySsrc(send_streams_, ssrc) != nullptr;
+  }
+  // TODO(perkj): This is to support legacy unit test that only check one
+  // sending stream.
+  uint32_t send_ssrc() const {
+    if (send_streams_.empty())
+      return 0;
+    return send_streams_[0].first_ssrc();
+  }
+
+  // TODO(perkj): This is to support legacy unit test that only check one
+  // sending stream.
+  const std::string rtcp_cname() {
+    if (send_streams_.empty())
+      return "";
+    return send_streams_[0].cname;
+  }
+  const RtcpParameters& send_rtcp_parameters() { return send_rtcp_parameters_; }
+  const RtcpParameters& recv_rtcp_parameters() { return recv_rtcp_parameters_; }
+
+  bool ready_to_send() const {
+    return ready_to_send_;
+  }
+
+  int transport_overhead_per_packet() const {
+    return transport_overhead_per_packet_;
+  }
+
+  rtc::NetworkRoute last_network_route() const { return last_network_route_; }
+  int num_network_route_changes() const { return num_network_route_changes_; }
+  void set_num_network_route_changes(int changes) {
+    num_network_route_changes_ = changes;
+  }
+
+ protected:
+  // Mutes/unmutes |ssrc|; ssrc 0 is accepted even without a matching send
+  // stream (legacy "default stream" behavior).
+  bool MuteStream(uint32_t ssrc, bool mute) {
+    if (!HasSendStream(ssrc) && ssrc != 0) {
+      return false;
+    }
+    if (mute) {
+      muted_streams_.insert(ssrc);
+    } else {
+      muted_streams_.erase(ssrc);
+    }
+    return true;
+  }
+  bool set_sending(bool send) {
+    sending_ = send;
+    return true;
+  }
+  void set_playout(bool playout) { playout_ = playout; }
+  bool SetRecvRtpHeaderExtensions(const std::vector<RtpExtension>& extensions) {
+    recv_extensions_ = extensions;
+    return true;
+  }
+  bool SetSendRtpHeaderExtensions(const std::vector<RtpExtension>& extensions) {
+    send_extensions_ = extensions;
+    return true;
+  }
+  void set_send_rtcp_parameters(const RtcpParameters& params) {
+    send_rtcp_parameters_ = params;
+  }
+  void set_recv_rtcp_parameters(const RtcpParameters& params) {
+    recv_rtcp_parameters_ = params;
+  }
+  // Incoming packets are only recorded, never parsed.
+  virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                                const rtc::PacketTime& packet_time) {
+    rtp_packets_.push_back(std::string(packet->data<char>(), packet->size()));
+  }
+  virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketTime& packet_time) {
+    rtcp_packets_.push_back(std::string(packet->data<char>(), packet->size()));
+  }
+  virtual void OnReadyToSend(bool ready) {
+    ready_to_send_ = ready;
+  }
+
+  virtual void OnNetworkRouteChanged(const std::string& transport_name,
+                                     const rtc::NetworkRoute& network_route) {
+    last_network_route_ = network_route;
+    ++num_network_route_changes_;
+    transport_overhead_per_packet_ = network_route.packet_overhead;
+  }
+  bool fail_set_send_codecs() const { return fail_set_send_codecs_; }
+  bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; }
+
+ private:
+  bool sending_;
+  bool playout_;
+  std::vector<RtpExtension> recv_extensions_;
+  std::vector<RtpExtension> send_extensions_;
+  std::list<std::string> rtp_packets_;
+  std::list<std::string> rtcp_packets_;
+  std::vector<StreamParams> send_streams_;
+  std::vector<StreamParams> receive_streams_;
+  RtcpParameters send_rtcp_parameters_;
+  RtcpParameters recv_rtcp_parameters_;
+  std::set<uint32_t> muted_streams_;
+  std::map<uint32_t, webrtc::RtpParameters> rtp_send_parameters_;
+  std::map<uint32_t, webrtc::RtpParameters> rtp_receive_parameters_;
+  bool fail_set_send_codecs_;
+  bool fail_set_recv_codecs_;
+  // NOTE(review): send_ssrc_ and rtcp_cname_ do not appear to be referenced
+  // anywhere in this header -- confirm before removing.
+  uint32_t send_ssrc_;
+  std::string rtcp_cname_;
+  bool ready_to_send_;
+  int transport_overhead_per_packet_;
+  rtc::NetworkRoute last_network_route_;
+  int num_network_route_changes_;
+};
+
+// Fake voice media channel: records codecs, options, per-SSRC output
+// volumes, attached audio sources and queued DTMF events so tests can
+// assert on them. Registered with / unregistered from its FakeVoiceEngine.
+class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
+ public:
+  // One queued InsertDtmf() call.
+  struct DtmfInfo {
+    DtmfInfo(uint32_t ssrc, int event_code, int duration)
+        : ssrc(ssrc),
+          event_code(event_code),
+          duration(duration) {}
+    uint32_t ssrc;
+    int event_code;
+    int duration;
+  };
+  explicit FakeVoiceMediaChannel(FakeVoiceEngine* engine,
+                                 const AudioOptions& options)
+      : engine_(engine), max_bps_(-1) {
+    output_scalings_[0] = 1.0; // For default channel.
+    SetOptions(options);
+  }
+  ~FakeVoiceMediaChannel();
+  const std::vector<AudioCodec>& recv_codecs() const { return recv_codecs_; }
+  const std::vector<AudioCodec>& send_codecs() const { return send_codecs_; }
+  const std::vector<AudioCodec>& codecs() const { return send_codecs(); }
+  const std::vector<DtmfInfo>& dtmf_info_queue() const {
+    return dtmf_info_queue_;
+  }
+  const AudioOptions& options() const { return options_; }
+  int max_bps() const { return max_bps_; }
+  virtual bool SetSendParameters(const AudioSendParameters& params) {
+    set_send_rtcp_parameters(params.rtcp);
+    return (SetSendCodecs(params.codecs) &&
+            SetSendRtpHeaderExtensions(params.extensions) &&
+            SetMaxSendBandwidth(params.max_bandwidth_bps) &&
+            SetOptions(params.options));
+  }
+
+  virtual bool SetRecvParameters(const AudioRecvParameters& params) {
+    set_recv_rtcp_parameters(params.rtcp);
+    return (SetRecvCodecs(params.codecs) &&
+            SetRecvRtpHeaderExtensions(params.extensions));
+  }
+
+  virtual void SetPlayout(bool playout) { set_playout(playout); }
+  virtual void SetSend(bool send) { set_sending(send); }
+  // |options| is only applied when |enable| is true.
+  virtual bool SetAudioSend(uint32_t ssrc,
+                            bool enable,
+                            const AudioOptions* options,
+                            AudioSource* source) {
+    if (!SetLocalSource(ssrc, source)) {
+      return false;
+    }
+    if (!RtpHelper<VoiceMediaChannel>::MuteStream(ssrc, !enable)) {
+      return false;
+    }
+    if (enable && options) {
+      return SetOptions(*options);
+    }
+    return true;
+  }
+
+  bool HasSource(uint32_t ssrc) const {
+    return local_sinks_.find(ssrc) != local_sinks_.end();
+  }
+
+  // Also initializes the stream's output volume to 1.0.
+  virtual bool AddRecvStream(const StreamParams& sp) {
+    if (!RtpHelper<VoiceMediaChannel>::AddRecvStream(sp))
+      return false;
+    output_scalings_[sp.first_ssrc()] = 1.0;
+    return true;
+  }
+  virtual bool RemoveRecvStream(uint32_t ssrc) {
+    if (!RtpHelper<VoiceMediaChannel>::RemoveRecvStream(ssrc))
+      return false;
+    output_scalings_.erase(ssrc);
+    return true;
+  }
+
+  virtual bool GetActiveStreams(StreamList* streams) { return true; }
+  virtual int GetOutputLevel() { return 0; }
+
+  // DTMF is supported only when a "telephone-event" send codec is present.
+  virtual bool CanInsertDtmf() {
+    for (std::vector<AudioCodec>::const_iterator it = send_codecs_.begin();
+         it != send_codecs_.end(); ++it) {
+      // Find the DTMF telephone event "codec".
+      if (_stricmp(it->name.c_str(), "telephone-event") == 0) {
+        return true;
+      }
+    }
+    return false;
+  }
+  virtual bool InsertDtmf(uint32_t ssrc,
+                          int event_code,
+                          int duration) {
+    dtmf_info_queue_.push_back(DtmfInfo(ssrc, event_code, duration));
+    return true;
+  }
+
+  // ssrc 0 applies the volume to every known stream.
+  virtual bool SetOutputVolume(uint32_t ssrc, double volume) {
+    if (0 == ssrc) {
+      std::map<uint32_t, double>::iterator it;
+      for (it = output_scalings_.begin(); it != output_scalings_.end(); ++it) {
+        it->second = volume;
+      }
+      return true;
+    } else if (output_scalings_.find(ssrc) != output_scalings_.end()) {
+      output_scalings_[ssrc] = volume;
+      return true;
+    }
+    return false;
+  }
+  bool GetOutputVolume(uint32_t ssrc, double* volume) {
+    if (output_scalings_.find(ssrc) == output_scalings_.end())
+      return false;
+    *volume = output_scalings_[ssrc];
+    return true;
+  }
+
+  virtual bool GetStats(VoiceMediaInfo* info) { return false; }
+
+  // Note: only the last sink is retained, regardless of |ssrc|.
+  virtual void SetRawAudioSink(
+      uint32_t ssrc,
+      std::unique_ptr<webrtc::AudioSinkInterface> sink) {
+    sink_ = std::move(sink);
+  }
+
+  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const {
+    return std::vector<webrtc::RtpSource>();
+  }
+
+ private:
+  // Attaches itself as the sink of an AudioSource and detaches on
+  // destruction or when the source closes.
+  class VoiceChannelAudioSink : public AudioSource::Sink {
+   public:
+    explicit VoiceChannelAudioSink(AudioSource* source) : source_(source) {
+      source_->SetSink(this);
+    }
+    virtual ~VoiceChannelAudioSink() {
+      if (source_) {
+        source_->SetSink(nullptr);
+      }
+    }
+    void OnData(const void* audio_data,
+                int bits_per_sample,
+                int sample_rate,
+                size_t number_of_channels,
+                size_t number_of_frames) override {}
+    void OnClose() override { source_ = nullptr; }
+    AudioSource* source() const { return source_; }
+
+   private:
+    AudioSource* source_;
+  };
+
+  bool SetRecvCodecs(const std::vector<AudioCodec>& codecs) {
+    if (fail_set_recv_codecs()) {
+      // Fake the failure in SetRecvCodecs.
+      return false;
+    }
+    recv_codecs_ = codecs;
+    return true;
+  }
+  bool SetSendCodecs(const std::vector<AudioCodec>& codecs) {
+    if (fail_set_send_codecs()) {
+      // Fake the failure in SetSendCodecs.
+      return false;
+    }
+    send_codecs_ = codecs;
+    return true;
+  }
+  bool SetMaxSendBandwidth(int bps) {
+    max_bps_ = bps;
+    return true;
+  }
+  bool SetOptions(const AudioOptions& options) {
+    // Does a "merge" of current options and set options.
+    options_.SetAll(options);
+    return true;
+  }
+  // A null |source| detaches any existing sink for |ssrc|; a non-null source
+  // must match the one already attached (if any).
+  bool SetLocalSource(uint32_t ssrc, AudioSource* source) {
+    auto it = local_sinks_.find(ssrc);
+    if (source) {
+      if (it != local_sinks_.end()) {
+        RTC_CHECK(it->second->source() == source);
+      } else {
+        local_sinks_.insert(std::make_pair(
+            ssrc, rtc::MakeUnique<VoiceChannelAudioSink>(source)));
+      }
+    } else {
+      if (it != local_sinks_.end()) {
+        local_sinks_.erase(it);
+      }
+    }
+    return true;
+  }
+
+  FakeVoiceEngine* engine_;
+  std::vector<AudioCodec> recv_codecs_;
+  std::vector<AudioCodec> send_codecs_;
+  std::map<uint32_t, double> output_scalings_;
+  std::vector<DtmfInfo> dtmf_info_queue_;
+  AudioOptions options_;
+  std::map<uint32_t, std::unique_ptr<VoiceChannelAudioSink>> local_sinks_;
+  std::unique_ptr<webrtc::AudioSinkInterface> sink_;
+  int max_bps_;
+};
+
+// Returns true iff |info| matches the given SSRC, event code and duration.
+inline bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info,
+                            uint32_t ssrc,
+                            int event_code,
+                            int duration) {
+  const bool ssrc_matches = info.ssrc == ssrc;
+  const bool event_matches = info.event_code == event_code;
+  const bool duration_matches = info.duration == duration;
+  return ssrc_matches && event_matches && duration_matches;
+}
+
+// Fake video media channel: records codecs, options, per-SSRC sinks and
+// sources so tests can assert on them. Registered with / unregistered from
+// its FakeVideoEngine.
+class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
+ public:
+  FakeVideoMediaChannel(FakeVideoEngine* engine, const VideoOptions& options)
+      : engine_(engine), max_bps_(-1) {
+    SetOptions(options);
+  }
+
+  ~FakeVideoMediaChannel();
+
+  const std::vector<VideoCodec>& recv_codecs() const { return recv_codecs_; }
+  const std::vector<VideoCodec>& send_codecs() const { return send_codecs_; }
+  const std::vector<VideoCodec>& codecs() const { return send_codecs(); }
+  bool rendering() const { return playout(); }
+  const VideoOptions& options() const { return options_; }
+  const std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*>&
+  sinks() const {
+    return sinks_;
+  }
+  int max_bps() const { return max_bps_; }
+  bool SetSendParameters(const VideoSendParameters& params) override {
+    set_send_rtcp_parameters(params.rtcp);
+    return (SetSendCodecs(params.codecs) &&
+            SetSendRtpHeaderExtensions(params.extensions) &&
+            SetMaxSendBandwidth(params.max_bandwidth_bps));
+  }
+  bool SetRecvParameters(const VideoRecvParameters& params) override {
+    set_recv_rtcp_parameters(params.rtcp);
+    return (SetRecvCodecs(params.codecs) &&
+            SetRecvRtpHeaderExtensions(params.extensions));
+  }
+  bool AddSendStream(const StreamParams& sp) override {
+    return RtpHelper<VideoMediaChannel>::AddSendStream(sp);
+  }
+  bool RemoveSendStream(uint32_t ssrc) override {
+    return RtpHelper<VideoMediaChannel>::RemoveSendStream(ssrc);
+  }
+
+  // Reports the first configured send codec, if any.
+  bool GetSendCodec(VideoCodec* send_codec) override {
+    if (send_codecs_.empty()) {
+      return false;
+    }
+    *send_codec = send_codecs_[0];
+    return true;
+  }
+  // ssrc 0 is accepted but the sink is intentionally not stored (legacy
+  // default-stream behavior); any other ssrc must already be registered.
+  bool SetSink(uint32_t ssrc,
+               rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
+    if (ssrc != 0 && sinks_.find(ssrc) == sinks_.end()) {
+      return false;
+    }
+    if (ssrc != 0) {
+      sinks_[ssrc] = sink;
+    }
+    return true;
+  }
+  bool HasSink(uint32_t ssrc) const {
+    return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr;
+  }
+
+  bool SetSend(bool send) override { return set_sending(send); }
+  // |options| is only applied when |enable| is true; the source is stored
+  // unconditionally (a null source clears it logically for HasSource()).
+  bool SetVideoSend(
+      uint32_t ssrc,
+      bool enable,
+      const VideoOptions* options,
+      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override {
+    if (!RtpHelper<VideoMediaChannel>::MuteStream(ssrc, !enable)) {
+      return false;
+    }
+    if (enable && options) {
+      if (!SetOptions(*options)) {
+        return false;
+      }
+    }
+    sources_[ssrc] = source;
+    return true;
+  }
+
+  bool HasSource(uint32_t ssrc) const {
+    return sources_.find(ssrc) != sources_.end() &&
+           sources_.at(ssrc) != nullptr;
+  }
+  // Registers a null sink placeholder so SetSink() accepts the ssrc later.
+  bool AddRecvStream(const StreamParams& sp) override {
+    if (!RtpHelper<VideoMediaChannel>::AddRecvStream(sp))
+      return false;
+    sinks_[sp.first_ssrc()] = NULL;
+    return true;
+  }
+  bool RemoveRecvStream(uint32_t ssrc) override {
+    if (!RtpHelper<VideoMediaChannel>::RemoveRecvStream(ssrc))
+      return false;
+    sinks_.erase(ssrc);
+    return true;
+  }
+
+  void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override {}
+  bool GetStats(VideoMediaInfo* info) override { return false; }
+
+ private:
+  bool SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
+    if (fail_set_recv_codecs()) {
+      // Fake the failure in SetRecvCodecs.
+      return false;
+    }
+    recv_codecs_ = codecs;
+    return true;
+  }
+  bool SetSendCodecs(const std::vector<VideoCodec>& codecs) {
+    if (fail_set_send_codecs()) {
+      // Fake the failure in SetSendCodecs.
+      return false;
+    }
+    send_codecs_ = codecs;
+
+    return true;
+  }
+  bool SetOptions(const VideoOptions& options) {
+    options_ = options;
+    return true;
+  }
+  bool SetMaxSendBandwidth(int bps) {
+    max_bps_ = bps;
+    return true;
+  }
+
+  FakeVideoEngine* engine_;
+  std::vector<VideoCodec> recv_codecs_;
+  std::vector<VideoCodec> send_codecs_;
+  std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*> sinks_;
+  std::map<uint32_t, rtc::VideoSourceInterface<webrtc::VideoFrame>*> sources_;
+  VideoOptions options_;
+  int max_bps_;
+};
+
+// Dummy option class, needed for the DataTraits abstraction in
+// channel_unittest.cc.
+class DataOptions {};
+
+// Fake data media channel: records codecs and the last payload sent, and
+// can simulate a blocked send path via set_send_blocked().
+class FakeDataMediaChannel : public RtpHelper<DataMediaChannel> {
+ public:
+  explicit FakeDataMediaChannel(void* unused, const DataOptions& options)
+      : send_blocked_(false), max_bps_(-1) {}
+  ~FakeDataMediaChannel() {}
+  const std::vector<DataCodec>& recv_codecs() const { return recv_codecs_; }
+  const std::vector<DataCodec>& send_codecs() const { return send_codecs_; }
+  const std::vector<DataCodec>& codecs() const { return send_codecs(); }
+  int max_bps() const { return max_bps_; }
+
+  virtual bool SetSendParameters(const DataSendParameters& params) {
+    set_send_rtcp_parameters(params.rtcp);
+    return (SetSendCodecs(params.codecs) &&
+            SetMaxSendBandwidth(params.max_bandwidth_bps));
+  }
+  virtual bool SetRecvParameters(const DataRecvParameters& params) {
+    set_recv_rtcp_parameters(params.rtcp);
+    return SetRecvCodecs(params.codecs);
+  }
+  virtual bool SetSend(bool send) { return set_sending(send); }
+  virtual bool SetReceive(bool receive) {
+    set_playout(receive);
+    return true;
+  }
+  virtual bool AddRecvStream(const StreamParams& sp) {
+    if (!RtpHelper<DataMediaChannel>::AddRecvStream(sp))
+      return false;
+    return true;
+  }
+  virtual bool RemoveRecvStream(uint32_t ssrc) {
+    if (!RtpHelper<DataMediaChannel>::RemoveRecvStream(ssrc))
+      return false;
+    return true;
+  }
+
+  // When blocked, reports SDR_BLOCK; otherwise records the params/payload
+  // for later inspection.
+  virtual bool SendData(const SendDataParams& params,
+                        const rtc::CopyOnWriteBuffer& payload,
+                        SendDataResult* result) {
+    if (send_blocked_) {
+      *result = SDR_BLOCK;
+      return false;
+    } else {
+      last_sent_data_params_ = params;
+      last_sent_data_ = std::string(payload.data<char>(), payload.size());
+      return true;
+    }
+  }
+
+  SendDataParams last_sent_data_params() { return last_sent_data_params_; }
+  std::string last_sent_data() { return last_sent_data_; }
+  bool is_send_blocked() { return send_blocked_; }
+  void set_send_blocked(bool blocked) { send_blocked_ = blocked; }
+
+ private:
+  bool SetRecvCodecs(const std::vector<DataCodec>& codecs) {
+    if (fail_set_recv_codecs()) {
+      // Fake the failure in SetRecvCodecs.
+      return false;
+    }
+    recv_codecs_ = codecs;
+    return true;
+  }
+  bool SetSendCodecs(const std::vector<DataCodec>& codecs) {
+    if (fail_set_send_codecs()) {
+      // Fake the failure in SetSendCodecs.
+      return false;
+    }
+    send_codecs_ = codecs;
+    return true;
+  }
+  bool SetMaxSendBandwidth(int bps) {
+    max_bps_ = bps;
+    return true;
+  }
+
+  std::vector<DataCodec> recv_codecs_;
+  std::vector<DataCodec> send_codecs_;
+  SendDataParams last_sent_data_params_;
+  std::string last_sent_data_;
+  bool send_blocked_;
+  int max_bps_;
+};
+
+// A base class for all of the shared parts between FakeVoiceEngine
+// and FakeVideoEngine.
+class FakeBaseEngine {
+ public:
+  FakeBaseEngine()
+      : options_changed_(false),
+        fail_create_channel_(false) {}
+  void set_fail_create_channel(bool fail) { fail_create_channel_ = fail; }
+
+  RtpCapabilities GetCapabilities() const { return capabilities_; }
+  void set_rtp_header_extensions(const std::vector<RtpExtension>& extensions) {
+    capabilities_.header_extensions = extensions;
+  }
+
+  // Overload for legacy cricket::RtpHeaderExtension entries; converts and
+  // appends them (does not clear already-registered extensions).
+  void set_rtp_header_extensions(
+      const std::vector<cricket::RtpHeaderExtension>& extensions) {
+    for (const cricket::RtpHeaderExtension& ext : extensions) {
+      RtpExtension webrtc_ext;
+      webrtc_ext.uri = ext.uri;
+      webrtc_ext.id = ext.id;
+      capabilities_.header_extensions.push_back(webrtc_ext);
+    }
+  }
+
+ protected:
+  // Flag used by optionsmessagehandler_unittest for checking whether any
+  // relevant setting has been updated.
+  // TODO(thaloun): Replace with explicit checks of before & after values.
+  bool options_changed_;
+  // When true, the derived engines' CreateChannel() returns null.
+  bool fail_create_channel_;
+  RtpCapabilities capabilities_;
+};
+
+// Fake voice engine; owns the list of fake voice channels it has created
+// and a settable codec list shared by send and receive.
+class FakeVoiceEngine : public FakeBaseEngine {
+ public:
+  FakeVoiceEngine() {
+    // Add a fake audio codec. Note that the name must not be "" as there are
+    // sanity checks against that.
+    codecs_.push_back(AudioCodec(101, "fake_audio_codec", 0, 0, 1));
+  }
+  void Init() {}
+  rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+    return rtc::scoped_refptr<webrtc::AudioState>();
+  }
+
+  // Returns a new fake channel registered with this engine, or null when
+  // set_fail_create_channel(true) simulates a creation failure.
+  VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+                                   const MediaConfig& config,
+                                   const AudioOptions& options) {
+    if (fail_create_channel_) {
+      return nullptr;
+    }
+
+    FakeVoiceMediaChannel* ch = new FakeVoiceMediaChannel(this, options);
+    channels_.push_back(ch);
+    return ch;
+  }
+  FakeVoiceMediaChannel* GetChannel(size_t index) {
+    return (channels_.size() > index) ? channels_[index] : NULL;
+  }
+  void UnregisterChannel(VoiceMediaChannel* channel) {
+    channels_.erase(std::find(channels_.begin(), channels_.end(), channel));
+  }
+
+  // TODO(ossu): For proper testing, These should either individually settable
+  // or the voice engine should reference mockable factories.
+  const std::vector<AudioCodec>& send_codecs() { return codecs_; }
+  const std::vector<AudioCodec>& recv_codecs() { return codecs_; }
+  void SetCodecs(const std::vector<AudioCodec>& codecs) { codecs_ = codecs; }
+
+  int GetInputLevel() { return 0; }
+
+  bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) {
+    return false;
+  }
+
+  void StopAecDump() {}
+
+  bool StartRtcEventLog(rtc::PlatformFile file, int64_t max_size_bytes) {
+    return false;
+  }
+
+  void StopRtcEventLog() {}
+
+ private:
+  std::vector<FakeVoiceMediaChannel*> channels_;
+  std::vector<AudioCodec> codecs_;
+
+  friend class FakeMediaEngine;
+};
+
+// Fake video engine; owns the list of fake video channels it has created
+// and a settable codec list shared by send and receive.
+class FakeVideoEngine : public FakeBaseEngine {
+ public:
+  FakeVideoEngine() : capture_(false) {
+    // Add a fake video codec. Note that the name must not be "" as there are
+    // sanity checks against that.
+    codecs_.push_back(VideoCodec(0, "fake_video_codec"));
+  }
+
+  // Replaces the engine options wholesale and flags the change so tests can
+  // observe it via FakeMediaEngine::options_changed().
+  bool SetOptions(const VideoOptions& options) {
+    options_ = options;
+    options_changed_ = true;
+    return true;
+  }
+
+  // Returns a new fake channel registered with this engine, or null when
+  // set_fail_create_channel(true) simulates a creation failure.
+  VideoMediaChannel* CreateChannel(webrtc::Call* call,
+                                   const MediaConfig& config,
+                                   const VideoOptions& options) {
+    if (fail_create_channel_) {
+      return nullptr;
+    }
+
+    FakeVideoMediaChannel* ch = new FakeVideoMediaChannel(this, options);
+    channels_.emplace_back(ch);
+    return ch;
+  }
+
+  FakeVideoMediaChannel* GetChannel(size_t index) {
+    return (channels_.size() > index) ? channels_[index] : nullptr;
+  }
+
+  void UnregisterChannel(VideoMediaChannel* channel) {
+    auto it = std::find(channels_.begin(), channels_.end(), channel);
+    RTC_DCHECK(it != channels_.end());
+    channels_.erase(it);
+  }
+
+  const std::vector<VideoCodec>& codecs() const { return codecs_; }
+
+  // Take the vector by const reference to avoid an unnecessary copy at every
+  // call; this also matches FakeVoiceEngine::SetCodecs. (Previously took the
+  // vector by value.)
+  void SetCodecs(const std::vector<VideoCodec>& codecs) { codecs_ = codecs; }
+
+  bool SetCapture(bool capture) {
+    capture_ = capture;
+    return true;
+  }
+
+ private:
+  std::vector<FakeVideoMediaChannel*> channels_;
+  std::vector<VideoCodec> codecs_;
+  bool capture_;
+  VideoOptions options_;
+
+  friend class FakeMediaEngine;
+};
+
+// Composite fake engine that bundles FakeVoiceEngine and FakeVideoEngine
+// and forwards codec/extension configuration to the appropriate one.
+class FakeMediaEngine :
+    public CompositeMediaEngine<FakeVoiceEngine, FakeVideoEngine> {
+ public:
+  FakeMediaEngine()
+      : CompositeMediaEngine<FakeVoiceEngine, FakeVideoEngine>(std::tuple<>(),
+                                                               std::tuple<>()) {
+  }
+
+  virtual ~FakeMediaEngine() {}
+
+  void SetAudioCodecs(const std::vector<AudioCodec>& codecs) {
+    voice().SetCodecs(codecs);
+  }
+  void SetVideoCodecs(const std::vector<VideoCodec>& codecs) {
+    video().SetCodecs(codecs);
+  }
+
+  void SetAudioRtpHeaderExtensions(
+      const std::vector<RtpExtension>& extensions) {
+    voice().set_rtp_header_extensions(extensions);
+  }
+  void SetVideoRtpHeaderExtensions(
+      const std::vector<RtpExtension>& extensions) {
+    video().set_rtp_header_extensions(extensions);
+  }
+
+  // Legacy cricket::RtpHeaderExtension overloads.
+  void SetAudioRtpHeaderExtensions(
+      const std::vector<cricket::RtpHeaderExtension>& extensions) {
+    voice().set_rtp_header_extensions(extensions);
+  }
+  void SetVideoRtpHeaderExtensions(
+      const std::vector<cricket::RtpHeaderExtension>& extensions) {
+    video().set_rtp_header_extensions(extensions);
+  }
+
+  FakeVoiceMediaChannel* GetVoiceChannel(size_t index) {
+    return voice().GetChannel(index);
+  }
+  FakeVideoMediaChannel* GetVideoChannel(size_t index) {
+    return video().GetChannel(index);
+  }
+
+  bool capture() const { return video().capture_; }
+  bool options_changed() const { return video().options_changed_; }
+  void clear_options_changed() { video().options_changed_ = false; }
+  // Applies the failure flag to both sub-engines.
+  void set_fail_create_channel(bool fail) {
+    voice().set_fail_create_channel(fail);
+    video().set_fail_create_channel(fail);
+  }
+};
+
+// These destructors are defined out-of-line, after the engine classes,
+// because they call engine methods that are only declared later in this
+// header (declaration-order constraint).
+inline FakeVoiceMediaChannel::~FakeVoiceMediaChannel() {
+  if (engine_) {
+    engine_->UnregisterChannel(this);
+  }
+}
+
+inline FakeVideoMediaChannel::~FakeVideoMediaChannel() {
+  if (engine_) {
+    engine_->UnregisterChannel(this);
+  }
+}
+
+// Fake data engine; owns the list of fake data channels it has created and
+// a settable data codec list.
+class FakeDataEngine : public DataEngineInterface {
+ public:
+  virtual DataMediaChannel* CreateChannel(const MediaConfig& config) {
+    FakeDataMediaChannel* ch = new FakeDataMediaChannel(this, DataOptions());
+    channels_.push_back(ch);
+    return ch;
+  }
+
+  FakeDataMediaChannel* GetChannel(size_t index) {
+    return (channels_.size() > index) ? channels_[index] : NULL;
+  }
+
+  void UnregisterChannel(DataMediaChannel* channel) {
+    channels_.erase(std::find(channels_.begin(), channels_.end(), channel));
+  }
+
+  virtual void SetDataCodecs(const std::vector<DataCodec>& data_codecs) {
+    data_codecs_ = data_codecs;
+  }
+
+  virtual const std::vector<DataCodec>& data_codecs() { return data_codecs_; }
+
+ private:
+  std::vector<FakeDataMediaChannel*> channels_;
+  std::vector<DataCodec> data_codecs_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEMEDIAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h b/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h
new file mode 100644
index 0000000000..3d98d1f917
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKENETWORKINTERFACE_H_
+#define MEDIA_BASE_FAKENETWORKINTERFACE_H_
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/rtputils.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/dscp.h"
+#include "rtc_base/messagehandler.h"
+#include "rtc_base/messagequeue.h"
+#include "rtc_base/thread.h"
+
+namespace cricket {
+
+// Fake NetworkInterface that sends/receives RTP/RTCP packets.
+class FakeNetworkInterface : public MediaChannel::NetworkInterface,
+ public rtc::MessageHandler {
+ public:
+ FakeNetworkInterface()
+ : thread_(rtc::Thread::Current()),
+ dest_(NULL),
+ conf_(false),
+ sendbuf_size_(-1),
+ recvbuf_size_(-1),
+ dscp_(rtc::DSCP_NO_CHANGE) {
+ }
+
+ void SetDestination(MediaChannel* dest) { dest_ = dest; }
+
+ // Conference mode is a mode where instead of simply forwarding the packets,
+ // the transport will send multiple copies of the packet with the specified
+ // SSRCs. This allows us to simulate receiving media from multiple sources.
+ void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs) {
+ rtc::CritScope cs(&crit_);
+ conf_ = conf;
+ conf_sent_ssrcs_ = ssrcs;
+ }
+
+ int NumRtpBytes() {
+ rtc::CritScope cs(&crit_);
+ int bytes = 0;
+ for (size_t i = 0; i < rtp_packets_.size(); ++i) {
+ bytes += static_cast<int>(rtp_packets_[i].size());
+ }
+ return bytes;
+ }
+
+ int NumRtpBytes(uint32_t ssrc) {
+ rtc::CritScope cs(&crit_);
+ int bytes = 0;
+ GetNumRtpBytesAndPackets(ssrc, &bytes, NULL);
+ return bytes;
+ }
+
+ int NumRtpPackets() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(rtp_packets_.size());
+ }
+
+ int NumRtpPackets(uint32_t ssrc) {
+ rtc::CritScope cs(&crit_);
+ int packets = 0;
+ GetNumRtpBytesAndPackets(ssrc, NULL, &packets);
+ return packets;
+ }
+
+ int NumSentSsrcs() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(sent_ssrcs_.size());
+ }
+
+ // Note: callers are responsible for deleting the returned buffer.
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ rtc::CritScope cs(&crit_);
+ if (index >= NumRtpPackets()) {
+ return NULL;
+ }
+ return new rtc::CopyOnWriteBuffer(rtp_packets_[index]);
+ }
+
+ int NumRtcpPackets() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(rtcp_packets_.size());
+ }
+
+ // Note: callers are responsible for deleting the returned buffer.
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
+ rtc::CritScope cs(&crit_);
+ if (index >= NumRtcpPackets()) {
+ return NULL;
+ }
+ return new rtc::CopyOnWriteBuffer(rtcp_packets_[index]);
+ }
+
+ int sendbuf_size() const { return sendbuf_size_; }
+ int recvbuf_size() const { return recvbuf_size_; }
+ rtc::DiffServCodePoint dscp() const { return dscp_; }
+
+ protected:
+ virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) {
+ rtc::CritScope cs(&crit_);
+
+ uint32_t cur_ssrc = 0;
+ if (!GetRtpSsrc(packet->data(), packet->size(), &cur_ssrc)) {
+ return false;
+ }
+ sent_ssrcs_[cur_ssrc]++;
+
+ rtp_packets_.push_back(*packet);
+ if (conf_) {
+ for (size_t i = 0; i < conf_sent_ssrcs_.size(); ++i) {
+ if (!SetRtpSsrc(packet->data(), packet->size(),
+ conf_sent_ssrcs_[i])) {
+ return false;
+ }
+ PostMessage(ST_RTP, *packet);
+ }
+ } else {
+ PostMessage(ST_RTP, *packet);
+ }
+ return true;
+ }
+
+ virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) {
+ rtc::CritScope cs(&crit_);
+ rtcp_packets_.push_back(*packet);
+ if (!conf_) {
+ // don't worry about RTCP in conf mode for now
+ PostMessage(ST_RTCP, *packet);
+ }
+ return true;
+ }
+
+ virtual int SetOption(SocketType type, rtc::Socket::Option opt,
+ int option) {
+ if (opt == rtc::Socket::OPT_SNDBUF) {
+ sendbuf_size_ = option;
+ } else if (opt == rtc::Socket::OPT_RCVBUF) {
+ recvbuf_size_ = option;
+ } else if (opt == rtc::Socket::OPT_DSCP) {
+ dscp_ = static_cast<rtc::DiffServCodePoint>(option);
+ }
+ return 0;
+ }
+
+ void PostMessage(int id, const rtc::CopyOnWriteBuffer& packet) {
+ thread_->Post(RTC_FROM_HERE, this, id, rtc::WrapMessageData(packet));
+ }
+
+ virtual void OnMessage(rtc::Message* msg) {
+ rtc::TypedMessageData<rtc::CopyOnWriteBuffer>* msg_data =
+ static_cast<rtc::TypedMessageData<rtc::CopyOnWriteBuffer>*>(
+ msg->pdata);
+ if (dest_) {
+ if (msg->message_id == ST_RTP) {
+ dest_->OnPacketReceived(&msg_data->data(),
+ rtc::CreatePacketTime(0));
+ } else {
+ dest_->OnRtcpReceived(&msg_data->data(),
+ rtc::CreatePacketTime(0));
+ }
+ }
+ delete msg_data;
+ }
+
+ private:
+ void GetNumRtpBytesAndPackets(uint32_t ssrc, int* bytes, int* packets) {
+ if (bytes) {
+ *bytes = 0;
+ }
+ if (packets) {
+ *packets = 0;
+ }
+ uint32_t cur_ssrc = 0;
+ for (size_t i = 0; i < rtp_packets_.size(); ++i) {
+ if (!GetRtpSsrc(rtp_packets_[i].data(), rtp_packets_[i].size(),
+ &cur_ssrc)) {
+ return;
+ }
+ if (ssrc == cur_ssrc) {
+ if (bytes) {
+ *bytes += static_cast<int>(rtp_packets_[i].size());
+ }
+ if (packets) {
+ ++(*packets);
+ }
+ }
+ }
+ }
+
+ rtc::Thread* thread_;
+ MediaChannel* dest_;
+ bool conf_;
+ // The ssrcs used in sending out packets in conference mode.
+ std::vector<uint32_t> conf_sent_ssrcs_;
+ // Map to track counts of packets that have been sent per ssrc.
+ // This includes packets that are dropped.
+ std::map<uint32_t, uint32_t> sent_ssrcs_;
+ // Map to track packet-number that needs to be dropped per ssrc.
+ std::map<uint32_t, std::set<uint32_t> > drop_map_;
+ rtc::CriticalSection crit_;
+ std::vector<rtc::CopyOnWriteBuffer> rtp_packets_;
+ std::vector<rtc::CopyOnWriteBuffer> rtcp_packets_;
+ int sendbuf_size_;
+ int recvbuf_size_;
+ rtc::DiffServCodePoint dscp_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKENETWORKINTERFACE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakertp.cc b/third_party/libwebrtc/webrtc/media/base/fakertp.cc
new file mode 100644
index 0000000000..be1631b73f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakertp.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "media/base/fakertp.h"
+#include "rtc_base/gunit.h"
+
+void CompareHeaderExtensions(const char* packet1, size_t packet1_size,
+ const char* packet2, size_t packet2_size,
+ const std::vector<int> encrypted_headers, bool expect_equal) {
+ // Sanity check: packets must be large enough to contain the RTP header and
+ // extensions header.
+ RTC_CHECK_GE(packet1_size, 12 + 4);
+ RTC_CHECK_GE(packet2_size, 12 + 4);
+ // RTP extension headers are the same.
+ EXPECT_EQ(0, memcmp(packet1 + 12, packet2 + 12, 4));
+ // Check for one-byte header extensions.
+ EXPECT_EQ('\xBE', packet1[12]);
+ EXPECT_EQ('\xDE', packet1[13]);
+ // Determine position and size of extension headers.
+ size_t extension_words = packet1[14] << 8 | packet1[15];
+ const char* extension_data1 = packet1 + 12 + 4;
+ const char* extension_end1 = extension_data1 + extension_words * 4;
+ const char* extension_data2 = packet2 + 12 + 4;
+ // Sanity check: packets must be large enough to contain the RTP header
+ // extensions.
+ RTC_CHECK_GE(packet1_size, 12 + 4 + extension_words * 4);
+ RTC_CHECK_GE(packet2_size, 12 + 4 + extension_words * 4);
+ while (extension_data1 < extension_end1) {
+ uint8_t id = (*extension_data1 & 0xf0) >> 4;
+ uint8_t len = (*extension_data1 & 0x0f) +1;
+ extension_data1++;
+ extension_data2++;
+ EXPECT_LE(extension_data1, extension_end1);
+ if (id == 15) {
+ // Finished parsing.
+ break;
+ }
+
+ // The header extension doesn't get encrypted if the id is not in the
+ // list of header extensions to encrypt.
+ if (expect_equal ||
+ std::find(encrypted_headers.begin(), encrypted_headers.end(), id)
+ == encrypted_headers.end()) {
+ EXPECT_EQ(0, memcmp(extension_data1, extension_data2, len));
+ } else {
+ EXPECT_NE(0, memcmp(extension_data1, extension_data2, len));
+ }
+
+ extension_data1 += len;
+ extension_data2 += len;
+ // Skip padding.
+ while (extension_data1 < extension_end1 && *extension_data1 == 0) {
+ extension_data1++;
+ extension_data2++;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/fakertp.h b/third_party/libwebrtc/webrtc/media/base/fakertp.h
new file mode 100644
index 0000000000..6786bab6d9
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakertp.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Fake RTP and RTCP packets to use in unit tests.
+
+#ifndef MEDIA_BASE_FAKERTP_H_
+#define MEDIA_BASE_FAKERTP_H_
+
+#include <vector>
+
+// A typical PCMU RTP packet.
+// PT=0, SN=1, TS=0, SSRC=1
+// all data FF
+static const unsigned char kPcmuFrame[] = {
+ 0x80, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+static const int kHeaderExtensionIDs[] = {1, 4};
+
+// A typical PCMU RTP packet with header extensions.
+// PT=0, SN=1, TS=0, SSRC=1
+// all data FF
+static const unsigned char kPcmuFrameWithExtensions[] = {
+ 0x90, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ // RFC 5285, section 4.2. One-Byte Header.
+ 0xBE, 0xDE,
+ // Header extension length 6 * 32 bits.
+ 0x00, 0x06,
+ // 8 bytes header id 1.
+ 0x17, 0x41, 0x42, 0x73, 0xA4, 0x75, 0x26, 0x27, 0x48,
+ // 3 bytes header id 2.
+ 0x22, 0x00, 0x00, 0xC8,
+ // 1 byte header id 3.
+ 0x30, 0x8E,
+ // 7 bytes header id 4.
+ 0x46, 0x55, 0x99, 0x63, 0x86, 0xB3, 0x95, 0xFB,
+ // 1 byte header padding.
+ 0x00,
+ // Payload data.
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+// A typical Receiver Report RTCP packet.
+// PT=RR, LN=1, SSRC=1
+// send SSRC=2, all other fields 0
+static const unsigned char kRtcpReport[] = {
+ 0x80, 0xc9, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+};
+
+// PT = 97, TS = 0, Seq = 1, SSRC = 2
+// H264 - NRI = 1, Type = 1, bit stream = FF
+
+static const unsigned char kH264Packet[] = {
+ 0x80, 0x61, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
+ 0x21, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+// PT= 101, SN=2, TS=3, SSRC = 4
+static const unsigned char kDataPacket[] = {
+ 0x80, 0x65, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
+};
+
+// This expects both packets to be based on kPcmuFrameWithExtensions.
+// Header extensions with an id in "encrypted_headers" are expected to be
+// different in the packets unless "expect_equal" is set to "true".
+void CompareHeaderExtensions(const char* packet1, size_t packet1_size,
+ const char* packet2, size_t packet2_size,
+ const std::vector<int> encrypted_headers, bool expect_equal);
+
+#endif // MEDIA_BASE_FAKERTP_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h b/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h
new file mode 100644
index 0000000000..536fe16356
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEVIDEOCAPTURER_H_
+#define MEDIA_BASE_FAKEVIDEOCAPTURER_H_
+
+#include <string.h>
+
+#include <memory>
+#include <vector>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "media/base/videocapturer.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+
+// Fake video capturer that allows the test to manually pump in frames.
+class FakeVideoCapturer : public cricket::VideoCapturer {
+ public:
+ explicit FakeVideoCapturer(bool is_screencast)
+ : running_(false),
+ initial_timestamp_(rtc::TimeNanos()),
+ next_timestamp_(rtc::kNumNanosecsPerMillisec),
+ is_screencast_(is_screencast),
+ rotation_(webrtc::kVideoRotation_0) {
+    // Default supported formats. Use ResetSupportedFormats to overwrite.
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 480,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(320, 240,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(160, 120,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_I420));
+ ResetSupportedFormats(formats);
+ }
+ FakeVideoCapturer() : FakeVideoCapturer(false) {}
+
+ ~FakeVideoCapturer() {
+ SignalDestroyed(this);
+ }
+
+ void ResetSupportedFormats(const std::vector<cricket::VideoFormat>& formats) {
+ SetSupportedFormats(formats);
+ }
+ bool CaptureFrame() {
+ if (!GetCaptureFormat()) {
+ return false;
+ }
+ return CaptureCustomFrame(GetCaptureFormat()->width,
+ GetCaptureFormat()->height,
+ GetCaptureFormat()->interval,
+ GetCaptureFormat()->fourcc);
+ }
+ bool CaptureCustomFrame(int width, int height, uint32_t fourcc) {
+ // Default to 30fps.
+ return CaptureCustomFrame(width, height, rtc::kNumNanosecsPerSec / 30,
+ fourcc);
+ }
+ bool CaptureCustomFrame(int width,
+ int height,
+ int64_t timestamp_interval,
+ uint32_t fourcc) {
+ if (!running_) {
+ return false;
+ }
+ RTC_CHECK(fourcc == FOURCC_I420);
+ RTC_CHECK(width > 0);
+ RTC_CHECK(height > 0);
+
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+
+ // TODO(nisse): It's a bit silly to have this logic in a fake
+ // class. Child classes of VideoCapturer are expected to call
+ // AdaptFrame, and the test case
+ // VideoCapturerTest.SinkWantsMaxPixelAndMaxPixelCountStepUp
+ // depends on this.
+ if (AdaptFrame(width, height,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
+ &adapted_width, &adapted_height, &crop_width, &crop_height,
+ &crop_x, &crop_y, nullptr)) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(adapted_width, adapted_height));
+ buffer->InitializeData();
+
+ OnFrame(webrtc::VideoFrame(
+ buffer, rotation_,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec),
+ width, height);
+ }
+ next_timestamp_ += timestamp_interval;
+
+ return true;
+ }
+
+ sigslot::signal1<FakeVideoCapturer*> SignalDestroyed;
+
+ cricket::CaptureState Start(const cricket::VideoFormat& format) override {
+ SetCaptureFormat(&format);
+ running_ = true;
+ SetCaptureState(cricket::CS_RUNNING);
+ return cricket::CS_RUNNING;
+ }
+ void Stop() override {
+ running_ = false;
+ SetCaptureFormat(NULL);
+ SetCaptureState(cricket::CS_STOPPED);
+ }
+ bool IsRunning() override { return running_; }
+ bool IsScreencast() const override { return is_screencast_; }
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override {
+ fourccs->push_back(cricket::FOURCC_I420);
+ fourccs->push_back(cricket::FOURCC_MJPG);
+ return true;
+ }
+
+ void SetRotation(webrtc::VideoRotation rotation) {
+ rotation_ = rotation;
+ }
+
+ webrtc::VideoRotation GetRotation() { return rotation_; }
+
+ private:
+ bool running_;
+ int64_t initial_timestamp_;
+ int64_t next_timestamp_;
+ const bool is_screencast_;
+ webrtc::VideoRotation rotation_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEVIDEOCAPTURER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h b/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h
new file mode 100644
index 0000000000..630063057b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEVIDEORENDERER_H_
+#define MEDIA_BASE_FAKEVIDEORENDERER_H_
+
+#include "api/video/video_frame.h"
+#include "media/base/videosinkinterface.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/logging.h"
+
+namespace cricket {
+
+// Faked video renderer that has a callback for actions on rendering.
+class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ FakeVideoRenderer()
+ : errors_(0),
+ width_(0),
+ height_(0),
+ rotation_(webrtc::kVideoRotation_0),
+ timestamp_us_(0),
+ num_rendered_frames_(0),
+ black_frame_(false) {}
+
+ virtual void OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope cs(&crit_);
+ // TODO(zhurunz) Check with VP8 team to see if we can remove this
+ // tolerance on Y values. Some unit tests produce Y values close
+ // to 16 rather than close to zero, for supposedly black frames.
+ // Largest value observed is 34, e.g., running
+ // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio.
+ black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame);
+ // Treat unexpected frame size as error.
+ ++num_rendered_frames_;
+ width_ = frame.width();
+ height_ = frame.height();
+ rotation_ = frame.rotation();
+ timestamp_us_ = frame.timestamp_us();
+ }
+
+ int errors() const { return errors_; }
+ int width() const {
+ rtc::CritScope cs(&crit_);
+ return width_;
+ }
+ int height() const {
+ rtc::CritScope cs(&crit_);
+ return height_;
+ }
+ webrtc::VideoRotation rotation() const {
+ rtc::CritScope cs(&crit_);
+ return rotation_;
+ }
+
+ int64_t timestamp_us() const {
+ rtc::CritScope cs(&crit_);
+ return timestamp_us_;
+ }
+ int num_rendered_frames() const {
+ rtc::CritScope cs(&crit_);
+ return num_rendered_frames_;
+ }
+ bool black_frame() const {
+ rtc::CritScope cs(&crit_);
+ return black_frame_;
+ }
+
+ private:
+ static bool CheckFrameColorYuv(uint8_t y_min,
+ uint8_t y_max,
+ uint8_t u_min,
+ uint8_t u_max,
+ uint8_t v_min,
+ uint8_t v_max,
+ const webrtc::VideoFrame* frame) {
+ if (!frame || !frame->video_frame_buffer()) {
+ return false;
+ }
+ rtc::scoped_refptr<const webrtc::I420BufferInterface> i420_buffer =
+ frame->video_frame_buffer()->ToI420();
+ // Y
+ int y_width = frame->width();
+ int y_height = frame->height();
+ const uint8_t* y_plane = i420_buffer->DataY();
+ const uint8_t* y_pos = y_plane;
+ int32_t y_pitch = i420_buffer->StrideY();
+ for (int i = 0; i < y_height; ++i) {
+ for (int j = 0; j < y_width; ++j) {
+ uint8_t y_value = *(y_pos + j);
+ if (y_value < y_min || y_value > y_max) {
+ return false;
+ }
+ }
+ y_pos += y_pitch;
+ }
+ // U and V
+ int chroma_width = i420_buffer->ChromaWidth();
+ int chroma_height = i420_buffer->ChromaHeight();
+ const uint8_t* u_plane = i420_buffer->DataU();
+ const uint8_t* v_plane = i420_buffer->DataV();
+ const uint8_t* u_pos = u_plane;
+ const uint8_t* v_pos = v_plane;
+ int32_t u_pitch = i420_buffer->StrideU();
+ int32_t v_pitch = i420_buffer->StrideV();
+ for (int i = 0; i < chroma_height; ++i) {
+ for (int j = 0; j < chroma_width; ++j) {
+ uint8_t u_value = *(u_pos + j);
+ if (u_value < u_min || u_value > u_max) {
+ return false;
+ }
+ uint8_t v_value = *(v_pos + j);
+ if (v_value < v_min || v_value > v_max) {
+ return false;
+ }
+ }
+ u_pos += u_pitch;
+ v_pos += v_pitch;
+ }
+ return true;
+ }
+
+ int errors_;
+ int width_;
+ int height_;
+ webrtc::VideoRotation rotation_;
+ int64_t timestamp_us_;
+ int num_rendered_frames_;
+ bool black_frame_;
+ rtc::CriticalSection crit_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEVIDEORENDERER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc
new file mode 100644
index 0000000000..4731c18f40
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/h264_profile_level_id.h"
+
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+
+#include "rtc_base/arraysize.h"
+
+namespace webrtc {
+namespace H264 {
+
+namespace {
+
+const char kProfileLevelId[] = "profile-level-id";
+const char kLevelAsymmetryAllowed[] = "level-asymmetry-allowed";
+
+// For level_idc=11 and profile_idc=0x42, 0x4D, or 0x58, the constraint set3
+// flag specifies if level 1b or level 1.1 is used.
+const uint8_t kConstraintSet3Flag = 0x10;
+
+// Convert a string of 8 characters into a byte where the positions containing
+// character c will have their bit set. For example, c = 'x', str = "x1xx0000"
+// will return 0b10110000. constexpr is used so that the pattern table in
+// kProfilePatterns is statically initialized.
+constexpr uint8_t ByteMaskString(char c, const char (&str)[9]) {
+ return (str[0] == c) << 7 | (str[1] == c) << 6 | (str[2] == c) << 5 |
+ (str[3] == c) << 4 | (str[4] == c) << 3 | (str[5] == c) << 2 |
+ (str[6] == c) << 1 | (str[7] == c) << 0;
+}
+
+// Class for matching bit patterns such as "x1xx0000" where 'x' is allowed to be
+// either 0 or 1.
+class BitPattern {
+ public:
+ explicit constexpr BitPattern(const char (&str)[9])
+ : mask_(~ByteMaskString('x', str)),
+ masked_value_(ByteMaskString('1', str)) {}
+
+ bool IsMatch(uint8_t value) const { return masked_value_ == (value & mask_); }
+
+ private:
+ const uint8_t mask_;
+ const uint8_t masked_value_;
+};
+
+// Table for converting between profile_idc/profile_iop to H264::Profile.
+struct ProfilePattern {
+ const uint8_t profile_idc;
+ const BitPattern profile_iop;
+ const Profile profile;
+};
+
+// This is from https://tools.ietf.org/html/rfc6184#section-8.1.
+constexpr ProfilePattern kProfilePatterns[] = {
+ {0x42, BitPattern("x1xx0000"), kProfileConstrainedBaseline},
+ {0x4D, BitPattern("1xxx0000"), kProfileConstrainedBaseline},
+ {0x58, BitPattern("11xx0000"), kProfileConstrainedBaseline},
+ {0x42, BitPattern("x0xx0000"), kProfileBaseline},
+ {0x58, BitPattern("10xx0000"), kProfileBaseline},
+ {0x4D, BitPattern("0x0x0000"), kProfileMain},
+ {0x64, BitPattern("00000000"), kProfileHigh},
+ {0x64, BitPattern("00001100"), kProfileConstrainedHigh}};
+
+// Compare H264 levels and handle the level 1b case.
+bool IsLess(Level a, Level b) {
+ if (a == kLevel1_b)
+ return b != kLevel1 && b != kLevel1_b;
+ if (b == kLevel1_b)
+ return a == kLevel1;
+ return a < b;
+}
+
+Level Min(Level a, Level b) {
+ return IsLess(a, b) ? a : b;
+}
+
+bool IsLevelAsymmetryAllowed(const CodecParameterMap& params) {
+ const auto it = params.find(kLevelAsymmetryAllowed);
+ return it != params.end() && strcmp(it->second.c_str(), "1") == 0;
+}
+
+struct LevelConstraint {
+ const int max_macroblocks_per_second;
+ const int max_macroblock_frame_size;
+ const webrtc::H264::Level level;
+};
+
+// This is from ITU-T H.264 (02/2016) Table A-1 – Level limits.
+// This is from ITU-T H.264 (02/2016) Table A-1 - Level limits.
+// Entries are ordered by increasing level; MaxFS (macroblocks per frame) is
+// non-decreasing down the table, which SupportedLevel's reverse scan relies on.
+static constexpr LevelConstraint kLevelConstraints[] = {
+    {1485, 99, webrtc::H264::kLevel1},
+    {1485, 99, webrtc::H264::kLevel1_b},
+    {3000, 396, webrtc::H264::kLevel1_1},
+    {6000, 396, webrtc::H264::kLevel1_2},
+    {11880, 396, webrtc::H264::kLevel1_3},
+    {11880, 396, webrtc::H264::kLevel2},
+    {19800, 792, webrtc::H264::kLevel2_1},
+    {20250, 1620, webrtc::H264::kLevel2_2},
+    {40500, 1620, webrtc::H264::kLevel3},
+    {108000, 3600, webrtc::H264::kLevel3_1},
+    {216000, 5120, webrtc::H264::kLevel3_2},
+    {245760, 8192, webrtc::H264::kLevel4},
+    {245760, 8192, webrtc::H264::kLevel4_1},
+    {522240, 8704, webrtc::H264::kLevel4_2},
+    {589824, 22080, webrtc::H264::kLevel5},
+    {983040, 36864, webrtc::H264::kLevel5_1},  // was 3684: dropped digit
+    {2073600, 36864, webrtc::H264::kLevel5_2},  // was 3684: dropped digit
+};
+
+} // anonymous namespace
+
+rtc::Optional<ProfileLevelId> ParseProfileLevelId(const char* str) {
+ // The string should consist of 3 bytes in hexadecimal format.
+ if (strlen(str) != 6u)
+ return rtc::nullopt;
+ const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16);
+ if (profile_level_id_numeric == 0)
+ return rtc::nullopt;
+
+ // Separate into three bytes.
+ const uint8_t level_idc =
+ static_cast<uint8_t>(profile_level_id_numeric & 0xFF);
+ const uint8_t profile_iop =
+ static_cast<uint8_t>((profile_level_id_numeric >> 8) & 0xFF);
+ const uint8_t profile_idc =
+ static_cast<uint8_t>((profile_level_id_numeric >> 16) & 0xFF);
+
+ // Parse level based on level_idc and constraint set 3 flag.
+ Level level;
+ switch (level_idc) {
+ case kLevel1_1:
+ level = (profile_iop & kConstraintSet3Flag) != 0 ? kLevel1_b : kLevel1_1;
+ break;
+ case kLevel1:
+ case kLevel1_2:
+ case kLevel1_3:
+ case kLevel2:
+ case kLevel2_1:
+ case kLevel2_2:
+ case kLevel3:
+ case kLevel3_1:
+ case kLevel3_2:
+ case kLevel4:
+ case kLevel4_1:
+ case kLevel4_2:
+ case kLevel5:
+ case kLevel5_1:
+ case kLevel5_2:
+ level = static_cast<Level>(level_idc);
+ break;
+ default:
+ // Unrecognized level_idc.
+ return rtc::nullopt;
+ }
+
+ // Parse profile_idc/profile_iop into a Profile enum.
+ for (const ProfilePattern& pattern : kProfilePatterns) {
+ if (profile_idc == pattern.profile_idc &&
+ pattern.profile_iop.IsMatch(profile_iop)) {
+ return ProfileLevelId(pattern.profile, level);
+ }
+ }
+
+ // Unrecognized profile_idc/profile_iop combination.
+ return rtc::nullopt;
+}
+
+rtc::Optional<Level> SupportedLevel(int max_frame_pixel_count, float max_fps) {
+ static const int kPixelsPerMacroblock = 16 * 16;
+
+ for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) {
+ const LevelConstraint& level_constraint = kLevelConstraints[i];
+ if (level_constraint.max_macroblock_frame_size * kPixelsPerMacroblock <=
+ max_frame_pixel_count &&
+ level_constraint.max_macroblocks_per_second <=
+ max_fps * level_constraint.max_macroblock_frame_size) {
+ return level_constraint.level;
+ }
+ }
+
+ // No level supported.
+ return rtc::nullopt;
+}
+
+rtc::Optional<ProfileLevelId> ParseSdpProfileLevelId(
+ const CodecParameterMap& params) {
+ // TODO(magjed): The default should really be kProfileBaseline and kLevel1
+ // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In
+ // order to not break backwards compatibility with older versions of WebRTC
+ // where external codecs don't have any parameters, use
+ // kProfileConstrainedBaseline kLevel3_1 instead. This workaround will only be
+ // done in an interim period to allow external clients to update their code.
+ // http://crbug/webrtc/6337.
+ static const ProfileLevelId kDefaultProfileLevelId(
+ kProfileConstrainedBaseline, kLevel3_1);
+
+ const auto profile_level_id_it = params.find(kProfileLevelId);
+ return (profile_level_id_it == params.end())
+ ? kDefaultProfileLevelId
+ : ParseProfileLevelId(profile_level_id_it->second.c_str());
+}
+
+rtc::Optional<std::string> ProfileLevelIdToString(
+ const ProfileLevelId& profile_level_id) {
+ // Handle special case level == 1b.
+ if (profile_level_id.level == kLevel1_b) {
+ switch (profile_level_id.profile) {
+ case kProfileConstrainedBaseline:
+ return {"42f00b"};
+ case kProfileBaseline:
+ return {"42100b"};
+ case kProfileMain:
+ return {"4d100b"};
+ // Level 1b is not allowed for other profiles.
+ default:
+ return rtc::nullopt;
+ }
+ }
+
+ const char* profile_idc_iop_string;
+ switch (profile_level_id.profile) {
+ case kProfileConstrainedBaseline:
+ profile_idc_iop_string = "42e0";
+ break;
+ case kProfileBaseline:
+ profile_idc_iop_string = "4200";
+ break;
+ case kProfileMain:
+ profile_idc_iop_string = "4d00";
+ break;
+ case kProfileConstrainedHigh:
+ profile_idc_iop_string = "640c";
+ break;
+ case kProfileHigh:
+ profile_idc_iop_string = "6400";
+ break;
+ // Unrecognized profile.
+ default:
+ return rtc::nullopt;
+ }
+
+ char str[7];
+ snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level);
+ return {str};
+}
+
+// Set level according to https://tools.ietf.org/html/rfc6184#section-8.2.2.
+void GenerateProfileLevelIdForAnswer(
+ const CodecParameterMap& local_supported_params,
+ const CodecParameterMap& remote_offered_params,
+ CodecParameterMap* answer_params) {
+ // If both local and remote haven't set profile-level-id, they are both using
+ // the default profile. In this case, don't set profile-level-id in answer
+ // either.
+ if (!local_supported_params.count(kProfileLevelId) &&
+ !remote_offered_params.count(kProfileLevelId)) {
+ return;
+ }
+
+ // Parse profile-level-ids.
+ const rtc::Optional<ProfileLevelId> local_profile_level_id =
+ ParseSdpProfileLevelId(local_supported_params);
+ const rtc::Optional<ProfileLevelId> remote_profile_level_id =
+ ParseSdpProfileLevelId(remote_offered_params);
+ // The local and remote codec must have valid and equal H264 Profiles.
+ RTC_DCHECK(local_profile_level_id);
+ RTC_DCHECK(remote_profile_level_id);
+ RTC_DCHECK_EQ(local_profile_level_id->profile,
+ remote_profile_level_id->profile);
+
+ // Parse level information.
+ const bool level_asymmetry_allowed =
+ IsLevelAsymmetryAllowed(local_supported_params) &&
+ IsLevelAsymmetryAllowed(remote_offered_params);
+ const Level local_level = local_profile_level_id->level;
+ const Level remote_level = remote_profile_level_id->level;
+ const Level min_level = Min(local_level, remote_level);
+
+ // Determine answer level. When level asymmetry is not allowed, level upgrade
+ // is not allowed, i.e., the level in the answer must be equal to or lower
+ // than the level in the offer.
+ const Level answer_level = level_asymmetry_allowed ? local_level : min_level;
+
+ // Set the resulting profile-level-id in the answer parameters.
+ (*answer_params)[kProfileLevelId] = *ProfileLevelIdToString(
+ ProfileLevelId(local_profile_level_id->profile, answer_level));
+}
+
+bool IsSameH264Profile(const CodecParameterMap& params1,
+ const CodecParameterMap& params2) {
+ const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+ webrtc::H264::ParseSdpProfileLevelId(params1);
+ const rtc::Optional<webrtc::H264::ProfileLevelId> other_profile_level_id =
+ webrtc::H264::ParseSdpProfileLevelId(params2);
+ // Compare H264 profiles, but not levels.
+ return profile_level_id && other_profile_level_id &&
+ profile_level_id->profile == other_profile_level_id->profile;
+}
+
+} // namespace H264
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h
new file mode 100644
index 0000000000..28899e53d9
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
+#define MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
+
+#include <map>
+#include <string>
+
+#include "api/optional.h"
+#include "common_types.h" // NOLINT(build/include)
+
+namespace webrtc {
+namespace H264 {
+
+// Map containing SDP codec parameters.
+typedef std::map<std::string, std::string> CodecParameterMap;
+
+// All values are equal to ten times the level number, except level 1b which is
+// special.
+enum Level {
+  // Level 1b does not fit the ten-times-level encoding used by the other
+  // enumerators, so it gets the otherwise-unused value 0. Note this means
+  // numeric comparison (e.g. Min()) orders it below every other level.
+  kLevel1_b = 0,
+  kLevel1 = 10,
+  kLevel1_1 = 11,
+  kLevel1_2 = 12,
+  kLevel1_3 = 13,
+  kLevel2 = 20,
+  kLevel2_1 = 21,
+  kLevel2_2 = 22,
+  kLevel3 = 30,
+  kLevel3_1 = 31,
+  kLevel3_2 = 32,
+  kLevel4 = 40,
+  kLevel4_1 = 41,
+  kLevel4_2 = 42,
+  kLevel5 = 50,
+  kLevel5_1 = 51,
+  kLevel5_2 = 52
+};
+
+// Aggregate of an H264 profile and level, the two components encoded in the
+// SDP "profile-level-id" parameter. See ParseProfileLevelId /
+// ProfileLevelIdToString for the string representation.
+struct ProfileLevelId {
+  ProfileLevelId(Profile profile, Level level)
+      : profile(profile), level(level) {}
+  Profile profile;
+  Level level;
+};
+
+// Parse profile level id that is represented as a string of 3 hex bytes.
+// Nothing will be returned if the string is not a recognized H264
+// profile level id.
+rtc::Optional<ProfileLevelId> ParseProfileLevelId(const char* str);
+
+// Parse profile level id that is represented as a string of 3 hex bytes
+// contained in an SDP key-value map. A default profile level id will be
+// returned if the profile-level-id key is missing. Nothing will be returned if
+// the key is present but the string is invalid.
+rtc::Optional<ProfileLevelId> ParseSdpProfileLevelId(
+ const CodecParameterMap& params);
+
+// Given that a decoder supports up to a given frame size (in pixels) at up to a
+// given number of frames per second, return the highest H.264 level where it
+// can guarantee that it will be able to support all valid encoded streams that
+// are within that level.
+rtc::Optional<Level> SupportedLevel(int max_frame_pixel_count, float max_fps);
+
+// Returns canonical string representation as three hex bytes of the profile
+// level id, or returns nothing for invalid profile level ids.
+rtc::Optional<std::string> ProfileLevelIdToString(
+ const ProfileLevelId& profile_level_id);
+
+// Generate codec parameters that will be used as answer in an SDP negotiation
+// based on local supported parameters and remote offered parameters. Both
+// |local_supported_params|, |remote_offered_params|, and |answer_params|
+// represent sendrecv media descriptions, i.e they are a mix of both encode and
+// decode capabilities. In theory, when the profile in |local_supported_params|
+// represent a strict superset of the profile in |remote_offered_params|, we
+// could limit the profile in |answer_params| to the profile in
+// |remote_offered_params|. However, to simplify the code, each supported H264
+// profile should be listed explicitly in the list of local supported codecs,
+// even if they are redundant. Then each local codec in the list should be
+// tested one at a time against the remote codec, and only when the profiles are
+// equal should this function be called. Therefore, this function does not need
+// to handle profile intersection, and the profile of |local_supported_params|
+// and |remote_offered_params| must be equal before calling this function. The
+// parameters that are used when negotiating are the level part of
+// profile-level-id and level-asymmetry-allowed.
+void GenerateProfileLevelIdForAnswer(
+ const CodecParameterMap& local_supported_params,
+ const CodecParameterMap& remote_offered_params,
+ CodecParameterMap* answer_params);
+
+// Returns true if the parameters have the same H264 profile, i.e. the same
+// H264::Profile (Baseline, High, etc).
+bool IsSameH264Profile(const CodecParameterMap& params1,
+ const CodecParameterMap& params2);
+
+} // namespace H264
+} // namespace webrtc
+
+#endif // MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediachannel.h b/third_party/libwebrtc/webrtc/media/base/mediachannel.h
new file mode 100644
index 0000000000..4826b2dc4a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediachannel.h
@@ -0,0 +1,1254 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIACHANNEL_H_
+#define MEDIA_BASE_MEDIACHANNEL_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/optional.h"
+#include "api/rtpparameters.h"
+#include "api/rtpreceiverinterface.h"
+#include "api/video/video_timing.h"
+#include "call/video_config.h"
+#include "media/base/codec.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/streamparams.h"
+#include "media/base/videosinkinterface.h"
+#include "media/base/videosourceinterface.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/basictypes.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/dscp.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/window.h"
+
+
+namespace rtc {
+class RateLimiter;
+class Timing;
+}
+
+namespace webrtc {
+class AudioSinkInterface;
+class VideoFrame;
+}
+
+namespace cricket {
+
+class AudioSource;
+class VideoCapturer;
+struct RtpHeader;
+struct VideoFormat;
+
+const int kScreencastDefaultFps = 5;
+
+// Returns "<key>: <value>, " when |val| holds a value, or an empty string
+// otherwise. Helper used to build the ToString() output of the option
+// structs below.
+template <class T>
+static std::string ToStringIfSet(const char* key, const rtc::Optional<T>& val) {
+  std::string str;
+  if (val) {
+    str = key;
+    str += ": ";
+    // |val| is known to be set inside this branch, so dereference directly.
+    // (Previously this re-tested |val| in a redundant ternary whose empty
+    // string arm was unreachable.)
+    str += rtc::ToString(*val);
+    str += ", ";
+  }
+  return str;
+}
+
+// Formats |vals| as "[a, b, c]" by calling ToString() on each element.
+// T must therefore provide a ToString() member.
+template <class T>
+static std::string VectorToString(const std::vector<T>& vals) {
+  std::ostringstream ost;
+  ost << "[";
+  for (size_t i = 0; i < vals.size(); ++i) {
+    if (i > 0) {
+      ost << ", ";
+    }
+    ost << vals[i].ToString();
+  }
+  ost << "]";
+  return ost.str();
+}
+
+// Construction-time settings, passed on when creating
+// MediaChannels.
+struct MediaConfig {
+  // Set DSCP value on packets. This flag comes from the
+  // PeerConnection constraint 'googDscp'.
+  bool enable_dscp = false;
+
+  // Video-specific config.
+  struct Video {
+    // Enable WebRTC CPU Overuse Detection. This flag comes from the
+    // PeerConnection constraint 'googCpuOveruseDetection'.
+    bool enable_cpu_overuse_detection = true;
+
+    // Enable WebRTC suspension of video. No video frames will be sent
+    // when the bitrate is below the configured minimum bitrate. This
+    // flag comes from the PeerConnection constraint
+    // 'googSuspendBelowMinBitrate', and WebRtcVideoChannel copies it
+    // to VideoSendStream::Config::suspend_below_min_bitrate.
+    bool suspend_below_min_bitrate = false;
+
+    // Set to true if the renderer has an algorithm of frame selection.
+    // If the value is true, then WebRTC will hand over a frame as soon as
+    // possible without delay, and rendering smoothness is completely the duty
+    // of the renderer;
+    // If the value is false, then WebRTC is responsible to delay frame release
+    // in order to increase rendering smoothness.
+    //
+    // This flag comes from PeerConnection's RtcConfiguration, but is
+    // currently only set by the command line flag
+    // 'disable-rtc-smoothness-algorithm'.
+    // WebRtcVideoChannel::AddRecvStream copies it to the created
+    // WebRtcVideoReceiveStream, where it is returned by the
+    // SmoothsRenderedFrames method. This method is used by the
+    // VideoReceiveStream, where the value is passed on to the
+    // IncomingVideoStream constructor.
+    bool disable_prerenderer_smoothing = false;
+
+    // Enables periodic bandwidth probing in application-limited region.
+    bool periodic_alr_bandwidth_probing = false;
+  } video;
+
+  // Member-wise equality over all fields, including the nested Video config.
+  bool operator==(const MediaConfig& o) const {
+    return enable_dscp == o.enable_dscp &&
+           video.enable_cpu_overuse_detection ==
+               o.video.enable_cpu_overuse_detection &&
+           video.suspend_below_min_bitrate ==
+               o.video.suspend_below_min_bitrate &&
+           video.disable_prerenderer_smoothing ==
+               o.video.disable_prerenderer_smoothing &&
+           video.periodic_alr_bandwidth_probing ==
+               o.video.periodic_alr_bandwidth_probing;
+  }
+
+  bool operator!=(const MediaConfig& o) const { return !(*this == o); }
+};
+
+// Options that can be applied to a VoiceMediaChannel or a VoiceMediaEngine.
+// Used to be flags, but that makes it hard to selectively apply options.
+// We are moving all of the setting of options to structs like this,
+// but some things currently still use flags.
+struct AudioOptions {
+  // Overwrites each option in |this| with the corresponding value from
+  // |change|, but only for options that |change| actually has set; unset
+  // options in |change| leave the current value untouched (see SetFrom).
+  void SetAll(const AudioOptions& change) {
+    SetFrom(&echo_cancellation, change.echo_cancellation);
+    SetFrom(&auto_gain_control, change.auto_gain_control);
+    SetFrom(&noise_suppression, change.noise_suppression);
+    SetFrom(&highpass_filter, change.highpass_filter);
+    SetFrom(&stereo_swapping, change.stereo_swapping);
+    SetFrom(&audio_jitter_buffer_max_packets,
+            change.audio_jitter_buffer_max_packets);
+    SetFrom(&audio_jitter_buffer_fast_accelerate,
+            change.audio_jitter_buffer_fast_accelerate);
+    SetFrom(&typing_detection, change.typing_detection);
+    SetFrom(&aecm_generate_comfort_noise, change.aecm_generate_comfort_noise);
+    SetFrom(&adjust_agc_delta, change.adjust_agc_delta);
+    SetFrom(&experimental_agc, change.experimental_agc);
+    SetFrom(&extended_filter_aec, change.extended_filter_aec);
+    SetFrom(&delay_agnostic_aec, change.delay_agnostic_aec);
+    SetFrom(&experimental_ns, change.experimental_ns);
+    SetFrom(&intelligibility_enhancer, change.intelligibility_enhancer);
+    SetFrom(&level_control, change.level_control);
+    SetFrom(&residual_echo_detector, change.residual_echo_detector);
+    SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov);
+    SetFrom(&tx_agc_digital_compression_gain,
+            change.tx_agc_digital_compression_gain);
+    SetFrom(&tx_agc_limiter, change.tx_agc_limiter);
+    SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe);
+    SetFrom(&audio_network_adaptor, change.audio_network_adaptor);
+    SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config);
+    SetFrom(&level_control_initial_peak_level_dbfs,
+            change.level_control_initial_peak_level_dbfs);
+  }
+
+  // Member-wise equality; an unset optional only equals another unset one.
+  bool operator==(const AudioOptions& o) const {
+    return echo_cancellation == o.echo_cancellation &&
+           auto_gain_control == o.auto_gain_control &&
+           noise_suppression == o.noise_suppression &&
+           highpass_filter == o.highpass_filter &&
+           stereo_swapping == o.stereo_swapping &&
+           audio_jitter_buffer_max_packets ==
+               o.audio_jitter_buffer_max_packets &&
+           audio_jitter_buffer_fast_accelerate ==
+               o.audio_jitter_buffer_fast_accelerate &&
+           typing_detection == o.typing_detection &&
+           aecm_generate_comfort_noise == o.aecm_generate_comfort_noise &&
+           experimental_agc == o.experimental_agc &&
+           extended_filter_aec == o.extended_filter_aec &&
+           delay_agnostic_aec == o.delay_agnostic_aec &&
+           experimental_ns == o.experimental_ns &&
+           intelligibility_enhancer == o.intelligibility_enhancer &&
+           level_control == o.level_control &&
+           residual_echo_detector == o.residual_echo_detector &&
+           adjust_agc_delta == o.adjust_agc_delta &&
+           tx_agc_target_dbov == o.tx_agc_target_dbov &&
+           tx_agc_digital_compression_gain ==
+               o.tx_agc_digital_compression_gain &&
+           tx_agc_limiter == o.tx_agc_limiter &&
+           combined_audio_video_bwe == o.combined_audio_video_bwe &&
+           audio_network_adaptor == o.audio_network_adaptor &&
+           audio_network_adaptor_config == o.audio_network_adaptor_config &&
+           level_control_initial_peak_level_dbfs ==
+               o.level_control_initial_peak_level_dbfs;
+  }
+  bool operator!=(const AudioOptions& o) const { return !(*this == o); }
+
+  // Human-readable dump for logging; only options that are set are printed.
+  std::string ToString() const {
+    std::ostringstream ost;
+    ost << "AudioOptions {";
+    ost << ToStringIfSet("aec", echo_cancellation);
+    ost << ToStringIfSet("agc", auto_gain_control);
+    ost << ToStringIfSet("ns", noise_suppression);
+    ost << ToStringIfSet("hf", highpass_filter);
+    ost << ToStringIfSet("swap", stereo_swapping);
+    ost << ToStringIfSet("audio_jitter_buffer_max_packets",
+                         audio_jitter_buffer_max_packets);
+    ost << ToStringIfSet("audio_jitter_buffer_fast_accelerate",
+                         audio_jitter_buffer_fast_accelerate);
+    ost << ToStringIfSet("typing", typing_detection);
+    ost << ToStringIfSet("comfort_noise", aecm_generate_comfort_noise);
+    ost << ToStringIfSet("agc_delta", adjust_agc_delta);
+    ost << ToStringIfSet("experimental_agc", experimental_agc);
+    ost << ToStringIfSet("extended_filter_aec", extended_filter_aec);
+    ost << ToStringIfSet("delay_agnostic_aec", delay_agnostic_aec);
+    ost << ToStringIfSet("experimental_ns", experimental_ns);
+    ost << ToStringIfSet("intelligibility_enhancer", intelligibility_enhancer);
+    ost << ToStringIfSet("level_control", level_control);
+    ost << ToStringIfSet("level_control_initial_peak_level_dbfs",
+                         level_control_initial_peak_level_dbfs);
+    ost << ToStringIfSet("residual_echo_detector", residual_echo_detector);
+    ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
+    ost << ToStringIfSet("tx_agc_digital_compression_gain",
+                         tx_agc_digital_compression_gain);
+    ost << ToStringIfSet("tx_agc_limiter", tx_agc_limiter);
+    ost << ToStringIfSet("combined_audio_video_bwe", combined_audio_video_bwe);
+    ost << ToStringIfSet("audio_network_adaptor", audio_network_adaptor);
+    // The adaptor config is a serialized proto buffer and therefore not human
+    // readable. So we comment out the following line.
+    // ost << ToStringIfSet("audio_network_adaptor_config",
+    //                      audio_network_adaptor_config);
+    ost << "}";
+    return ost.str();
+  }
+
+  // Audio processing that attempts to filter away the output signal from
+  // later inbound pickup.
+  rtc::Optional<bool> echo_cancellation;
+  // Audio processing to adjust the sensitivity of the local mic dynamically.
+  rtc::Optional<bool> auto_gain_control;
+  // Audio processing to filter out background noise.
+  rtc::Optional<bool> noise_suppression;
+  // Audio processing to remove background noise of lower frequencies.
+  rtc::Optional<bool> highpass_filter;
+  // Audio processing to swap the left and right channels.
+  rtc::Optional<bool> stereo_swapping;
+  // Audio receiver jitter buffer (NetEq) max capacity in number of packets.
+  rtc::Optional<int> audio_jitter_buffer_max_packets;
+  // Audio receiver jitter buffer (NetEq) fast accelerate mode.
+  rtc::Optional<bool> audio_jitter_buffer_fast_accelerate;
+  // Audio processing to detect typing.
+  rtc::Optional<bool> typing_detection;
+  rtc::Optional<bool> aecm_generate_comfort_noise;
+  rtc::Optional<int> adjust_agc_delta;
+  rtc::Optional<bool> experimental_agc;
+  rtc::Optional<bool> extended_filter_aec;
+  rtc::Optional<bool> delay_agnostic_aec;
+  rtc::Optional<bool> experimental_ns;
+  rtc::Optional<bool> intelligibility_enhancer;
+  rtc::Optional<bool> level_control;
+  // Specifies an optional initialization value for the level controller.
+  rtc::Optional<float> level_control_initial_peak_level_dbfs;
+  // Note that tx_agc_* only applies to non-experimental AGC.
+  rtc::Optional<bool> residual_echo_detector;
+  rtc::Optional<uint16_t> tx_agc_target_dbov;
+  rtc::Optional<uint16_t> tx_agc_digital_compression_gain;
+  rtc::Optional<bool> tx_agc_limiter;
+  // Enable combined audio+bandwidth BWE.
+  // TODO(pthatcher): This flag is set from the
+  // "googCombinedAudioVideoBwe", but not used anywhere. So delete it,
+  // and check if any other AudioOptions members are unused.
+  rtc::Optional<bool> combined_audio_video_bwe;
+  // Enable audio network adaptor.
+  rtc::Optional<bool> audio_network_adaptor;
+  // Config string for audio network adaptor.
+  rtc::Optional<std::string> audio_network_adaptor_config;
+
+ private:
+  // Copies |o| into |s| only when |o| holds a value.
+  template <typename T>
+  static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
+    if (o) {
+      *s = o;
+    }
+  }
+};
+
+// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
+// Used to be flags, but that makes it hard to selectively apply options.
+// We are moving all of the setting of options to structs like this,
+// but some things currently still use flags.
+struct VideoOptions {
+  // Overwrites each option with the corresponding value from |change|, but
+  // only for options that |change| has set (see SetFrom).
+  void SetAll(const VideoOptions& change) {
+    SetFrom(&video_noise_reduction, change.video_noise_reduction);
+    SetFrom(&screencast_min_bitrate_kbps, change.screencast_min_bitrate_kbps);
+    SetFrom(&is_screencast, change.is_screencast);
+  }
+
+  bool operator==(const VideoOptions& o) const {
+    return video_noise_reduction == o.video_noise_reduction &&
+           screencast_min_bitrate_kbps == o.screencast_min_bitrate_kbps &&
+           is_screencast == o.is_screencast;
+  }
+  bool operator!=(const VideoOptions& o) const { return !(*this == o); }
+
+  // Human-readable dump for logging; only options that are set are printed.
+  std::string ToString() const {
+    std::ostringstream ost;
+    ost << "VideoOptions {";
+    ost << ToStringIfSet("noise reduction", video_noise_reduction);
+    ost << ToStringIfSet("screencast min bitrate kbps",
+                         screencast_min_bitrate_kbps);
+    ost << ToStringIfSet("is_screencast ", is_screencast);
+    ost << "}";
+    return ost.str();
+  }
+
+  // Enable denoising? This flag comes from the getUserMedia
+  // constraint 'googNoiseReduction', and WebRtcVideoEngine passes it
+  // on to the codec options. Disabled by default.
+  rtc::Optional<bool> video_noise_reduction;
+  // Force screencast to use a minimum bitrate. This flag comes from
+  // the PeerConnection constraint 'googScreencastMinBitrate'. It is
+  // copied to the encoder config by WebRtcVideoChannel.
+  rtc::Optional<int> screencast_min_bitrate_kbps;
+  // Set by screencast sources. Implies selection of encoding settings
+  // suitable for screencast. Most likely not the right way to do
+  // things, e.g., screencast of a text document and screencast of a
+  // youtube video have different needs.
+  rtc::Optional<bool> is_screencast;
+
+ private:
+  // Copies |o| into |s| only when |o| holds a value.
+  template <typename T>
+  static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
+    if (o) {
+      *s = o;
+    }
+  }
+};
+
+// TODO(isheriff): Remove this once client usage is fixed to use RtpExtension.
+struct RtpHeaderExtension {
+  RtpHeaderExtension() : id(0) {}
+  RtpHeaderExtension(const std::string& uri, int id) : uri(uri), id(id) {}
+
+  // Human-readable dump ("{uri: ..., id: ...}") for logging.
+  std::string ToString() const {
+    std::ostringstream ost;
+    ost << "{";
+    ost << "uri: " << uri;
+    ost << ", id: " << id;
+    ost << "}";
+    return ost.str();
+  }
+
+  // Extension URI identifying the extension type.
+  std::string uri;
+  // Numeric extension id; 0 in the default-constructed (unset) state.
+  int id;
+};
+
+// Base class for voice/video/data media channels. Owns no transport itself;
+// all outgoing RTP/RTCP is funneled through the pluggable NetworkInterface,
+// which may be swapped at runtime under |network_interface_crit_|.
+class MediaChannel : public sigslot::has_slots<> {
+ public:
+  // Abstract transport used by the channel to send RTP and RTCP packets and
+  // to set per-socket options (e.g. DSCP) on the RTP/RTCP sockets.
+  class NetworkInterface {
+   public:
+    enum SocketType { ST_RTP, ST_RTCP };
+    virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                            const rtc::PacketOptions& options) = 0;
+    virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                          const rtc::PacketOptions& options) = 0;
+    virtual int SetOption(SocketType type, rtc::Socket::Option opt,
+                          int option) = 0;
+    virtual ~NetworkInterface() {}
+  };
+
+  explicit MediaChannel(const MediaConfig& config)
+      : enable_dscp_(config.enable_dscp), network_interface_(NULL) {}
+  MediaChannel() : enable_dscp_(false), network_interface_(NULL) {}
+  virtual ~MediaChannel() {}
+
+  // Sets the abstract interface class for sending RTP/RTCP data.
+  virtual void SetInterface(NetworkInterface *iface) {
+    rtc::CritScope cs(&network_interface_crit_);
+    network_interface_ = iface;
+    // (Re)apply the DSCP setting on the new interface's sockets.
+    SetDscp(enable_dscp_ ? PreferredDscp() : rtc::DSCP_DEFAULT);
+  }
+  // DSCP value used when |enable_dscp_| is set; subclasses may override.
+  virtual rtc::DiffServCodePoint PreferredDscp() const {
+    return rtc::DSCP_DEFAULT;
+  }
+  // Called when a RTP packet is received.
+  virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                                const rtc::PacketTime& packet_time) = 0;
+  // Called when a RTCP packet is received.
+  virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketTime& packet_time) = 0;
+  // Called when the socket's ability to send has changed.
+  virtual void OnReadyToSend(bool ready) = 0;
+  // Called when the network route used for sending packets changed.
+  virtual void OnNetworkRouteChanged(
+      const std::string& transport_name,
+      const rtc::NetworkRoute& network_route) = 0;
+  // Creates a new outgoing media stream with SSRCs and CNAME as described
+  // by sp.
+  virtual bool AddSendStream(const StreamParams& sp) = 0;
+  // Removes an outgoing media stream.
+  // ssrc must be the first SSRC of the media stream if the stream uses
+  // multiple SSRCs.
+  virtual bool RemoveSendStream(uint32_t ssrc) = 0;
+  // Creates a new incoming media stream with SSRCs and CNAME as described
+  // by sp.
+  virtual bool AddRecvStream(const StreamParams& sp) = 0;
+  // Removes an incoming media stream.
+  // ssrc must be the first SSRC of the media stream if the stream uses
+  // multiple SSRCs.
+  virtual bool RemoveRecvStream(uint32_t ssrc) = 0;
+
+  // Returns the absolute sendtime extension id value from media channel.
+  // -1 means "not set" in this base implementation.
+  virtual int GetRtpSendTimeExtnId() const {
+    return -1;
+  }
+
+  // Base method to send packet using NetworkInterface.
+  bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                  const rtc::PacketOptions& options) {
+    return DoSendPacket(packet, false, options);
+  }
+
+  bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                const rtc::PacketOptions& options) {
+    return DoSendPacket(packet, true, options);
+  }
+
+  // Forwards a socket option to the network interface; returns -1 when no
+  // interface has been set yet.
+  int SetOption(NetworkInterface::SocketType type,
+                rtc::Socket::Option opt,
+                int option) {
+    rtc::CritScope cs(&network_interface_crit_);
+    if (!network_interface_)
+      return -1;
+
+    return network_interface_->SetOption(type, opt, option);
+  }
+
+ private:
+  // This method sets DSCP |value| on both RTP and RTCP channels.
+  int SetDscp(rtc::DiffServCodePoint value) {
+    int ret;
+    ret = SetOption(NetworkInterface::ST_RTP,
+                    rtc::Socket::OPT_DSCP,
+                    value);
+    // Only touch the RTCP socket if the RTP one succeeded.
+    if (ret == 0) {
+      ret = SetOption(NetworkInterface::ST_RTCP,
+                      rtc::Socket::OPT_DSCP,
+                      value);
+    }
+    return ret;
+  }
+
+  // Shared implementation for SendPacket/SendRtcp; |rtcp| selects the path.
+  // Returns false when no network interface has been set.
+  bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
+                    bool rtcp,
+                    const rtc::PacketOptions& options) {
+    rtc::CritScope cs(&network_interface_crit_);
+    if (!network_interface_)
+      return false;
+
+    return (!rtcp) ? network_interface_->SendPacket(packet, options)
+                   : network_interface_->SendRtcp(packet, options);
+  }
+
+  const bool enable_dscp_;
+  // |network_interface_| can be accessed from the worker_thread and
+  // from any MediaEngine threads. This critical section is to protect accessing
+  // of network_interface_ object.
+  rtc::CriticalSection network_interface_crit_;
+  NetworkInterface* network_interface_;
+};
+
+// The stats information is structured as follows:
+// Media are represented by either MediaSenderInfo or MediaReceiverInfo.
+// Media contains a vector of SSRC infos that are exclusively used by this
+// media. (SSRCs shared between media streams can't be represented.)
+
+// Information about an SSRC.
+// This data may be locally recorded, or received in an RTCP SR or RR.
+struct SsrcSenderInfo {
+  SsrcSenderInfo()
+      : ssrc(0),
+        timestamp(0) {
+  }
+  // The SSRC these stats apply to; 0 in the default-constructed state.
+  uint32_t ssrc;
+  double timestamp;  // NTP timestamp, represented as seconds since epoch.
+};
+
+// Receiver-side counterpart of SsrcSenderInfo.
+struct SsrcReceiverInfo {
+  SsrcReceiverInfo()
+      : ssrc(0),
+        timestamp(0) {
+  }
+  // The SSRC these stats apply to; 0 in the default-constructed state.
+  uint32_t ssrc;
+  // NTP timestamp in seconds since epoch, as in SsrcSenderInfo.
+  double timestamp;
+};
+
+// Common sender-side stats shared by voice, video and data senders.
+struct MediaSenderInfo {
+  MediaSenderInfo()
+      : bytes_sent(0),
+        packets_sent(0),
+        packets_lost(0),
+        fraction_lost(0.0),
+        rtt_ms(0) {
+  }
+  void add_ssrc(const SsrcSenderInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32_t ssrc) {
+    SsrcSenderInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32_t> ssrcs() const {
+    std::vector<uint32_t> retval;
+    for (std::vector<SsrcSenderInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // Returns the first SSRC, or 0 when no stats were recorded.
+  // This will eventually go away.
+  uint32_t ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
+  int64_t bytes_sent;
+  int packets_sent;
+  int packets_lost;
+  float fraction_lost;
+  int64_t rtt_ms;
+  std::string codec_name;
+  rtc::Optional<int> codec_payload_type;
+  std::vector<SsrcSenderInfo> local_stats;
+  std::vector<SsrcReceiverInfo> remote_stats;
+};
+
+// Common receiver-side stats shared by voice, video and data receivers.
+struct MediaReceiverInfo {
+  MediaReceiverInfo()
+      : bytes_rcvd(0),
+        packets_rcvd(0),
+        packets_lost(0),
+        fraction_lost(0.0) {
+  }
+  void add_ssrc(const SsrcReceiverInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32_t ssrc) {
+    SsrcReceiverInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32_t> ssrcs() const {
+    std::vector<uint32_t> retval;
+    for (std::vector<SsrcReceiverInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // Returns the first SSRC, or 0 when no stats were recorded.
+  // This will eventually go away.
+  uint32_t ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
+
+  int64_t bytes_rcvd;
+  int packets_rcvd;
+  int packets_lost;
+  float fraction_lost;
+  std::string codec_name;
+  rtc::Optional<int> codec_payload_type;
+  std::vector<SsrcReceiverInfo> local_stats;
+  std::vector<SsrcSenderInfo> remote_stats;
+};
+
+// Sender-side stats specific to audio; extends MediaSenderInfo.
+struct VoiceSenderInfo : public MediaSenderInfo {
+  VoiceSenderInfo()
+      : ext_seqnum(0),
+        jitter_ms(0),
+        audio_level(0),
+        total_input_energy(0.0),
+        total_input_duration(0.0),
+        aec_quality_min(0.0),
+        echo_delay_median_ms(0),
+        echo_delay_std_ms(0),
+        echo_return_loss(0),
+        echo_return_loss_enhancement(0),
+        residual_echo_likelihood(0.0f),
+        residual_echo_likelihood_recent_max(0.0f),
+        typing_noise_detected(false) {}
+
+  int ext_seqnum;
+  int jitter_ms;
+  int audio_level;
+  // See description of "totalAudioEnergy" in the WebRTC stats spec:
+  // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy
+  double total_input_energy;
+  double total_input_duration;
+  // TODO(bugs.webrtc.org/8572): Remove APM stats from this struct, since they
+  // are no longer needed now that we have apm_statistics.
+  float aec_quality_min;
+  int echo_delay_median_ms;
+  int echo_delay_std_ms;
+  int echo_return_loss;
+  int echo_return_loss_enhancement;
+  float residual_echo_likelihood;
+  float residual_echo_likelihood_recent_max;
+  bool typing_noise_detected;
+  webrtc::ANAStats ana_statistics;
+  webrtc::AudioProcessingStats apm_statistics;
+};
+
+// Receiver-side stats specific to audio; extends MediaReceiverInfo.
+struct VoiceReceiverInfo : public MediaReceiverInfo {
+  VoiceReceiverInfo()
+      : ext_seqnum(0),
+        jitter_ms(0),
+        jitter_buffer_ms(0),
+        jitter_buffer_preferred_ms(0),
+        delay_estimate_ms(0),
+        audio_level(0),
+        total_output_energy(0.0),
+        total_samples_received(0),
+        total_output_duration(0.0),
+        concealed_samples(0),
+        concealment_events(0),
+        jitter_buffer_delay_seconds(0),
+        expand_rate(0),
+        speech_expand_rate(0),
+        secondary_decoded_rate(0),
+        secondary_discarded_rate(0),
+        accelerate_rate(0),
+        preemptive_expand_rate(0),
+        decoding_calls_to_silence_generator(0),
+        decoding_calls_to_neteq(0),
+        decoding_normal(0),
+        decoding_plc(0),
+        decoding_cng(0),
+        decoding_plc_cng(0),
+        decoding_muted_output(0),
+        capture_start_ntp_time_ms(-1) {}
+
+  int ext_seqnum;
+  int jitter_ms;
+  int jitter_buffer_ms;
+  int jitter_buffer_preferred_ms;
+  int delay_estimate_ms;
+  int audio_level;
+  // Stats below correspond to similarly-named fields in the WebRTC stats spec.
+  // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats
+  double total_output_energy;
+  uint64_t total_samples_received;
+  double total_output_duration;
+  uint64_t concealed_samples;
+  uint64_t concealment_events;
+  double jitter_buffer_delay_seconds;
+  // Stats below DO NOT correspond directly to anything in the WebRTC stats
+  // spec.
+  // Fraction of synthesized audio inserted through expansion.
+  float expand_rate;
+  // fraction of synthesized speech inserted through expansion.
+  float speech_expand_rate;
+  // fraction of data out of secondary decoding, including FEC and RED.
+  float secondary_decoded_rate;
+  // Fraction of secondary data, including FEC and RED, that is discarded.
+  // Discarding of secondary data can be caused by the reception of the primary
+  // data, obsoleting the secondary data. It can also be caused by early
+  // or late arrival of secondary data. This metric is the percentage of
+  // discarded secondary data since last query of receiver info.
+  float secondary_discarded_rate;
+  // Fraction of data removed through time compression.
+  float accelerate_rate;
+  // Fraction of data inserted through time stretching.
+  float preemptive_expand_rate;
+  int decoding_calls_to_silence_generator;
+  int decoding_calls_to_neteq;
+  int decoding_normal;
+  int decoding_plc;
+  int decoding_cng;
+  int decoding_plc_cng;
+  int decoding_muted_output;
+  // Estimated capture start time in NTP time in ms. -1 when unknown.
+  int64_t capture_start_ntp_time_ms;
+};
+
+// Sender-side stats specific to video; extends MediaSenderInfo.
+struct VideoSenderInfo : public MediaSenderInfo {
+  VideoSenderInfo()
+      : packets_cached(0),
+        firs_rcvd(0),
+        plis_rcvd(0),
+        nacks_rcvd(0),
+        send_frame_width(0),
+        send_frame_height(0),
+        framerate_input(0),
+        framerate_sent(0),
+        nominal_bitrate(0),
+        preferred_bitrate(0),
+        adapt_reason(0),
+        adapt_changes(0),
+        avg_encode_ms(0),
+        encode_usage_percent(0),
+        frames_encoded(0),
+        has_entered_low_resolution(false),
+        content_type(webrtc::VideoContentType::UNSPECIFIED) {}
+
+  std::vector<SsrcGroup> ssrc_groups;
+  // TODO(hbos): Move this to |VideoMediaInfo::send_codecs|?
+  std::string encoder_implementation_name;
+  int packets_cached;
+  int firs_rcvd;
+  int plis_rcvd;
+  int nacks_rcvd;
+  int send_frame_width;
+  int send_frame_height;
+  int framerate_input;
+  int framerate_sent;
+  int nominal_bitrate;
+  int preferred_bitrate;
+  int adapt_reason;
+  int adapt_changes;
+  int avg_encode_ms;
+  int encode_usage_percent;
+  uint32_t frames_encoded;
+  bool has_entered_low_resolution;
+  rtc::Optional<uint64_t> qp_sum;
+  webrtc::VideoContentType content_type;
+};
+
+// Receiver-side stats specific to video; extends MediaReceiverInfo.
+struct VideoReceiverInfo : public MediaReceiverInfo {
+  VideoReceiverInfo()
+      : packets_concealed(0),
+        firs_sent(0),
+        plis_sent(0),
+        nacks_sent(0),
+        frame_width(0),
+        frame_height(0),
+        framerate_rcvd(0),
+        framerate_decoded(0),
+        framerate_output(0),
+        framerate_render_input(0),
+        framerate_render_output(0),
+        frames_received(0),
+        frames_decoded(0),
+        frames_rendered(0),
+        interframe_delay_max_ms(-1),
+        content_type(webrtc::VideoContentType::UNSPECIFIED),
+        decode_ms(0),
+        max_decode_ms(0),
+        jitter_buffer_ms(0),
+        min_playout_delay_ms(0),
+        render_delay_ms(0),
+        target_delay_ms(0),
+        current_delay_ms(0),
+        capture_start_ntp_time_ms(-1) {}
+
+  std::vector<SsrcGroup> ssrc_groups;
+  // TODO(hbos): Move this to |VideoMediaInfo::receive_codecs|?
+  std::string decoder_implementation_name;
+  int packets_concealed;
+  int firs_sent;
+  int plis_sent;
+  int nacks_sent;
+  int frame_width;
+  int frame_height;
+  int framerate_rcvd;
+  int framerate_decoded;
+  int framerate_output;
+  // Framerate as sent to the renderer.
+  int framerate_render_input;
+  // Framerate that the renderer reports.
+  int framerate_render_output;
+  uint32_t frames_received;
+  uint32_t frames_decoded;
+  uint32_t frames_rendered;
+  rtc::Optional<uint64_t> qp_sum;
+  // -1 when no value has been recorded yet.
+  int64_t interframe_delay_max_ms;
+
+  webrtc::VideoContentType content_type;
+
+  // All stats below are gathered per-VideoReceiver, but some will be correlated
+  // across MediaStreamTracks. NOTE(hta): when sinking stats into per-SSRC
+  // structures, reflect this in the new layout.
+
+  // Current frame decode latency.
+  int decode_ms;
+  // Maximum observed frame decode latency.
+  int max_decode_ms;
+  // Jitter (network-related) latency.
+  int jitter_buffer_ms;
+  // Requested minimum playout latency.
+  int min_playout_delay_ms;
+  // Requested latency to account for rendering delay.
+  int render_delay_ms;
+  // Target overall delay: network+decode+render, accounting for
+  // min_playout_delay_ms.
+  int target_delay_ms;
+  // Current overall delay, possibly ramping towards target_delay_ms.
+  int current_delay_ms;
+
+  // Estimated capture start time in NTP time in ms. -1 when unknown.
+  int64_t capture_start_ntp_time_ms;
+
+  // Timing frame info: all important timestamps for a full lifetime of a
+  // single 'timing frame'.
+  rtc::Optional<webrtc::TimingFrameInfo> timing_frame_info;
+};
+
+// Per-send-stream statistics for a data channel (see DataMediaInfo).
+struct DataSenderInfo : public MediaSenderInfo {
+ DataSenderInfo()
+ : ssrc(0) {
+ }
+
+ uint32_t ssrc;
+};
+
+// Per-receive-stream statistics for a data channel (see DataMediaInfo).
+struct DataReceiverInfo : public MediaReceiverInfo {
+ DataReceiverInfo()
+ : ssrc(0) {
+ }
+
+ uint32_t ssrc;
+};
+
+// Bandwidth-estimation statistics. Deprecated (see
+// VideoMediaInfo::bw_estimations); the bitrate fields are filled in by
+// VideoMediaChannel::FillBitrateInfo().
+struct BandwidthEstimationInfo {
+ BandwidthEstimationInfo()
+ : available_send_bandwidth(0),
+ available_recv_bandwidth(0),
+ target_enc_bitrate(0),
+ actual_enc_bitrate(0),
+ retransmit_bitrate(0),
+ transmit_bitrate(0),
+ bucket_delay(0) {
+ }
+
+ int available_send_bandwidth;
+ int available_recv_bandwidth;
+ int target_enc_bitrate;
+ int actual_enc_bitrate;
+ int retransmit_bitrate;
+ int transmit_bitrate;
+ int64_t bucket_delay;
+};
+
+// Maps from payload type to |RtpCodecParameters|.
+typedef std::map<int, webrtc::RtpCodecParameters> RtpCodecParametersMap;
+
+// Aggregated voice statistics returned by VoiceMediaChannel::GetStats().
+struct VoiceMediaInfo {
+ void Clear() {
+ senders.clear();
+ receivers.clear();
+ send_codecs.clear();
+ receive_codecs.clear();
+ }
+ std::vector<VoiceSenderInfo> senders;
+ std::vector<VoiceReceiverInfo> receivers;
+ RtpCodecParametersMap send_codecs;
+ RtpCodecParametersMap receive_codecs;
+};
+
+// Aggregated video statistics returned by VideoMediaChannel::GetStats().
+struct VideoMediaInfo {
+ void Clear() {
+ senders.clear();
+ receivers.clear();
+ bw_estimations.clear();
+ send_codecs.clear();
+ receive_codecs.clear();
+ }
+ std::vector<VideoSenderInfo> senders;
+ std::vector<VideoReceiverInfo> receivers;
+ // Deprecated.
+ // TODO(holmer): Remove once upstream projects no longer use this.
+ std::vector<BandwidthEstimationInfo> bw_estimations;
+ RtpCodecParametersMap send_codecs;
+ RtpCodecParametersMap receive_codecs;
+};
+
+// Aggregated data-channel statistics returned by DataMediaChannel::GetStats().
+struct DataMediaInfo {
+ void Clear() {
+ senders.clear();
+ receivers.clear();
+ }
+ std::vector<DataSenderInfo> senders;
+ std::vector<DataReceiverInfo> receivers;
+};
+
+// RTCP-related parameters.
+struct RtcpParameters {
+ bool reduced_size = false;
+};
+
+// Base class for send/receive parameter structs; holds the codec list and
+// the RTP header extensions for a channel. Polymorphic (virtual dtor) so
+// derived parameter structs can be handled through a base pointer.
+template <class Codec>
+struct RtpParameters {
+ virtual std::string ToString() const {
+ std::ostringstream ost;
+ ost << "{";
+ ost << "codecs: " << VectorToString(codecs) << ", ";
+ ost << "extensions: " << VectorToString(extensions);
+ ost << "}";
+ return ost.str();
+ }
+
+ std::vector<Codec> codecs;
+ std::vector<webrtc::RtpExtension> extensions;
+ // TODO(pthatcher): Add streams.
+ RtcpParameters rtcp;
+ virtual ~RtpParameters() = default;
+};
+
+// TODO(deadbeef): Rename to RtpSenderParameters, since they're intended to
+// encapsulate all the parameters needed for an RtpSender.
+template <class Codec>
+struct RtpSendParameters : RtpParameters<Codec> {
+ std::string ToString() const override {
+ std::ostringstream ost;
+ ost << "{";
+ ost << "codecs: " << VectorToString(this->codecs) << ", ";
+ ost << "extensions: " << VectorToString(this->extensions) << ", ";
+ ost << "max_bandwidth_bps: " << max_bandwidth_bps << ", ";
+ ost << "}";
+ return ost.str();
+ }
+
+ // Maximum send bandwidth; -1 presumably means "no limit" — TODO confirm.
+ int max_bandwidth_bps = -1;
+};
+
+// Send parameters for audio, adding the audio-specific options.
+struct AudioSendParameters : RtpSendParameters<AudioCodec> {
+ std::string ToString() const override {
+ std::ostringstream ost;
+ ost << "{";
+ ost << "codecs: " << VectorToString(this->codecs) << ", ";
+ ost << "extensions: " << VectorToString(this->extensions) << ", ";
+ ost << "max_bandwidth_bps: " << max_bandwidth_bps << ", ";
+ ost << "options: " << options.ToString();
+ ost << "}";
+ return ost.str();
+ }
+
+ AudioOptions options;
+};
+
+// Receive parameters for audio; no additions over the base template.
+struct AudioRecvParameters : RtpParameters<AudioCodec> {
+};
+
+// Abstract interface for an audio media channel: configures codecs and
+// streams, controls playout/sending, and reports stats.
+class VoiceMediaChannel : public MediaChannel {
+ public:
+ enum Error {
+ ERROR_NONE = 0, // No error.
+ ERROR_OTHER, // Other errors.
+ ERROR_REC_DEVICE_OPEN_FAILED = 100, // Could not open mic.
+ ERROR_REC_DEVICE_MUTED, // Mic was muted by OS.
+ ERROR_REC_DEVICE_SILENT, // No background noise picked up.
+ ERROR_REC_DEVICE_SATURATION, // Mic input is clipping.
+ ERROR_REC_DEVICE_REMOVED, // Mic was removed while active.
+ ERROR_REC_RUNTIME_ERROR, // Processing is encountering errors.
+ ERROR_REC_SRTP_ERROR, // Generic SRTP failure.
+ ERROR_REC_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_REC_TYPING_NOISE_DETECTED, // Typing noise is detected.
+ ERROR_PLAY_DEVICE_OPEN_FAILED = 200, // Could not open playout.
+ ERROR_PLAY_DEVICE_MUTED, // Playout muted by OS.
+ ERROR_PLAY_DEVICE_REMOVED, // Playout removed while active.
+ ERROR_PLAY_RUNTIME_ERROR, // Errors in voice processing.
+ ERROR_PLAY_SRTP_ERROR, // Generic SRTP failure.
+ ERROR_PLAY_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_PLAY_SRTP_REPLAY, // Packet replay detected.
+ };
+
+ VoiceMediaChannel() {}
+ explicit VoiceMediaChannel(const MediaConfig& config)
+ : MediaChannel(config) {}
+ virtual ~VoiceMediaChannel() {}
+ // Set the codecs/extensions/options used for sending and receiving.
+ virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
+ virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
+ // Get/set the RTP send parameters for the outgoing stream |ssrc|.
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ // Get the receive parameters for the incoming stream identified by |ssrc|.
+ // If |ssrc| is 0, retrieve the receive parameters for the default receive
+ // stream, which is used when SSRCs are not signaled. Note that calling with
+ // an |ssrc| of 0 will return encoding parameters with an unset |ssrc|
+ // member.
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(
+ uint32_t ssrc) const = 0;
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ // Starts or stops playout of received audio.
+ virtual void SetPlayout(bool playout) = 0;
+ // Starts or stops sending (and potentially capture) of local audio.
+ virtual void SetSend(bool send) = 0;
+ // Configure stream for sending.
+ virtual bool SetAudioSend(uint32_t ssrc,
+ bool enable,
+ const AudioOptions* options,
+ AudioSource* source) = 0;
+ // Gets current energy levels for all incoming streams.
+ typedef std::vector<std::pair<uint32_t, int>> StreamList;
+ virtual bool GetActiveStreams(StreamList* actives) = 0;
+ // Get the current energy level of the stream sent to the speaker.
+ virtual int GetOutputLevel() = 0;
+ // Set speaker output volume of the specified ssrc.
+ virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
+ // Returns if the telephone-event has been negotiated.
+ virtual bool CanInsertDtmf() = 0;
+ // Send a DTMF |event|. The DTMF out-of-band signal will be used.
+ // The |ssrc| should be either 0 or a valid send stream ssrc.
+ // The valid value for the |event| are 0 to 15 which corresponding to
+ // DTMF event 0-9, *, #, A-D.
+ virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0;
+ // Gets quality stats for the channel.
+ virtual bool GetStats(VoiceMediaInfo* info) = 0;
+
+ // Install a raw PCM sink for the incoming stream |ssrc|.
+ virtual void SetRawAudioSink(
+ uint32_t ssrc,
+ std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
+
+ virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
+};
+
+// TODO(deadbeef): Rename to VideoSenderParameters, since they're intended to
+// encapsulate all the parameters needed for a video RtpSender.
+struct VideoSendParameters : RtpSendParameters<VideoCodec> {
+ // Use conference mode? This flag comes from the remote
+ // description's SDP line 'a=x-google-flag:conference', copied over
+ // by VideoChannel::SetRemoteContent_w, and ultimately used by
+ // conference mode screencast logic in
+ // WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig.
+ // The special screencast behaviour is disabled by default.
+ bool conference_mode = false;
+};
+
+// TODO(deadbeef): Rename to VideoReceiverParameters, since they're intended to
+// encapsulate all the parameters needed for a video RtpReceiver.
+struct VideoRecvParameters : RtpParameters<VideoCodec> {
+};
+
+// Abstract interface for a video media channel: configures codecs and
+// streams, wires up sources/sinks, and reports stats.
+class VideoMediaChannel : public MediaChannel {
+ public:
+ enum Error {
+ ERROR_NONE = 0, // No error.
+ ERROR_OTHER, // Other errors.
+ ERROR_REC_DEVICE_OPEN_FAILED = 100, // Could not open camera.
+ ERROR_REC_DEVICE_NO_DEVICE, // No camera.
+ ERROR_REC_DEVICE_IN_USE, // Device is already in use.
+ ERROR_REC_DEVICE_REMOVED, // Device is removed.
+ ERROR_REC_SRTP_ERROR, // Generic sender SRTP failure.
+ ERROR_REC_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_REC_CPU_MAX_CANT_DOWNGRADE, // Can't downgrade capture anymore.
+ ERROR_PLAY_SRTP_ERROR = 200, // Generic receiver SRTP failure.
+ ERROR_PLAY_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_PLAY_SRTP_REPLAY, // Packet replay detected.
+ };
+
+ VideoMediaChannel() {}
+ explicit VideoMediaChannel(const MediaConfig& config)
+ : MediaChannel(config) {}
+ virtual ~VideoMediaChannel() {}
+
+ // Set the codecs/extensions/options used for sending and receiving.
+ virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
+ virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
+ // Get/set the RTP send parameters for the outgoing stream |ssrc|.
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ // Get the receive parameters for the incoming stream identified by |ssrc|.
+ // If |ssrc| is 0, retrieve the receive parameters for the default receive
+ // stream, which is used when SSRCs are not signaled. Note that calling with
+ // an |ssrc| of 0 will return encoding parameters with an unset |ssrc|
+ // member.
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(
+ uint32_t ssrc) const = 0;
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ // Gets the currently set codecs/payload types to be used for outgoing media.
+ virtual bool GetSendCodec(VideoCodec* send_codec) = 0;
+ // Starts or stops transmission (and potentially capture) of local video.
+ virtual bool SetSend(bool send) = 0;
+ // Configure stream for sending and register a source.
+ // The |ssrc| must correspond to a registered send stream.
+ virtual bool SetVideoSend(
+ uint32_t ssrc,
+ bool enable,
+ const VideoOptions* options,
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
+ // Sets the sink object to be used for the specified stream.
+ // If SSRC is 0, the sink is used for the 'default' stream.
+ virtual bool SetSink(uint32_t ssrc,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
+ // This fills the "bitrate parts" (rtx, video bitrate) of the
+ // BandwidthEstimationInfo, since that part that isn't possible to get
+ // through webrtc::Call::GetStats, as they are statistics of the send
+ // streams.
+ // TODO(holmer): We should change this so that either BWE graphs doesn't
+ // need access to bitrates of the streams, or change the (RTC)StatsCollector
+ // so that it's getting the send stream stats separately by calling
+ // GetStats(), and merges with BandwidthEstimationInfo by itself.
+ virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0;
+ // Gets quality stats for the channel.
+ virtual bool GetStats(VideoMediaInfo* info) = 0;
+};
+
+enum DataMessageType {
+ // Chrome-Internal use only. See SctpDataMediaChannel for the actual PPID
+ // values.
+ DMT_NONE = 0,
+ DMT_CONTROL = 1,
+ DMT_BINARY = 2,
+ DMT_TEXT = 3,
+};
+
+// Info about data received in DataMediaChannel. For use in
+// DataMediaChannel::SignalDataReceived and in all of the signals that
+// signal fires, on up the chain.
+struct ReceiveDataParams {
+ // The in-packet stream identifier.
+ // RTP data channels use SSRCs, SCTP data channels use SIDs.
+ union {
+ uint32_t ssrc;
+ int sid;
+ };
+ // The type of message (binary, text, or control).
+ DataMessageType type;
+ // A per-stream value incremented per packet in the stream.
+ int seq_num;
+ // A per-stream value monotonically increasing with time.
+ int timestamp;
+
+ ReceiveDataParams() : sid(0), type(DMT_TEXT), seq_num(0), timestamp(0) {}
+};
+
+// Parameters describing an outgoing data message; see
+// DataMediaChannel::SendData().
+struct SendDataParams {
+ // The in-packet stream identifier.
+ // RTP data channels use SSRCs, SCTP data channels use SIDs.
+ union {
+ uint32_t ssrc;
+ int sid;
+ };
+ // The type of message (binary, text, or control).
+ DataMessageType type;
+
+ // For SCTP, whether to send messages flagged as ordered or not.
+ // If false, messages can be received out of order.
+ bool ordered;
+ // For SCTP, whether the messages are sent reliably or not.
+ // If false, messages may be lost.
+ bool reliable;
+ // For SCTP, if reliable == false, provide partial reliability by
+ // resending up to this many times. Either count or millis
+ // is supported, not both at the same time.
+ int max_rtx_count;
+ // For SCTP, if reliable == false, provide partial reliability by
+ // resending for up to this many milliseconds. Either count or millis
+ // is supported, not both at the same time.
+ int max_rtx_ms;
+
+ SendDataParams()
+ : sid(0),
+ type(DMT_TEXT),
+ // TODO(pthatcher): Make these true by default?
+ ordered(false),
+ reliable(false),
+ max_rtx_count(0),
+ max_rtx_ms(0) {}
+};
+
+// Result of DataMediaChannel::SendData(): success, hard error, or
+// temporarily blocked (would-block / flow control).
+enum SendDataResult { SDR_SUCCESS, SDR_ERROR, SDR_BLOCK };
+
+struct DataSendParameters : RtpSendParameters<DataCodec> {
+ // NOTE(review): this overrides the (implicitly virtual) base ToString()
+ // but is missing the |override| specifier.
+ std::string ToString() const {
+ std::ostringstream ost;
+ // Options and extensions aren't used.
+ ost << "{";
+ ost << "codecs: " << VectorToString(codecs) << ", ";
+ ost << "max_bandwidth_bps: " << max_bandwidth_bps;
+ ost << "}";
+ return ost.str();
+ }
+};
+
+// Receive parameters for data; no additions over the base template.
+struct DataRecvParameters : RtpParameters<DataCodec> {
+};
+
+// Abstract interface for a data media channel (RTP or SCTP transport).
+class DataMediaChannel : public MediaChannel {
+ public:
+ enum Error {
+ ERROR_NONE = 0, // No error.
+ ERROR_OTHER, // Other errors.
+ ERROR_SEND_SRTP_ERROR = 200, // Generic SRTP failure.
+ ERROR_SEND_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_RECV_SRTP_ERROR, // Generic SRTP failure.
+ ERROR_RECV_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+ ERROR_RECV_SRTP_REPLAY, // Packet replay detected.
+ };
+
+ DataMediaChannel() {}
+ explicit DataMediaChannel(const MediaConfig& config) : MediaChannel(config) {}
+ virtual ~DataMediaChannel() {}
+
+ virtual bool SetSendParameters(const DataSendParameters& params) = 0;
+ virtual bool SetRecvParameters(const DataRecvParameters& params) = 0;
+
+ // TODO(pthatcher): Implement this.
+ virtual bool GetStats(DataMediaInfo* info) { return true; }
+
+ virtual bool SetSend(bool send) = 0;
+ virtual bool SetReceive(bool receive) = 0;
+
+ // Default is a no-op; subclasses may react to route changes.
+ virtual void OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) {}
+
+ virtual bool SendData(
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ SendDataResult* result = NULL) = 0;
+ // Signals when data is received (params, data, len)
+ sigslot::signal3<const ReceiveDataParams&,
+ const char*,
+ size_t> SignalDataReceived;
+ // Signal when the media channel is ready to send the stream. Arguments are:
+ // writable(bool)
+ sigslot::signal1<bool> SignalReadyToSend;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_MEDIACHANNEL_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc b/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc
new file mode 100644
index 0000000000..06c172c07c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/mediaconstants.h"
+
+#include <string>
+
+namespace cricket {
+
+// 90 kHz is the standard RTP clock rate for video.
+const int kVideoCodecClockrate = 90000;
+const int kDataCodecClockrate = 90000;
+const int kDataMaxBandwidth = 30720; // bps
+
+const float kHighSystemCpuThreshold = 0.85f;
+const float kLowSystemCpuThreshold = 0.65f;
+const float kProcessCpuThreshold = 0.10f;
+
+const char kRtxCodecName[] = "rtx";
+const char kRedCodecName[] = "red";
+const char kUlpfecCodecName[] = "ulpfec";
+
+// TODO(brandtr): Change this to 'flexfec' when we are confident that the
+// header format is not changing anymore.
+const char kFlexfecCodecName[] = "flexfec-03";
+
+// draft-ietf-payload-flexible-fec-scheme-02.txt
+const char kFlexfecFmtpRepairWindow[] = "repair-window";
+
+const char kCodecParamAssociatedPayloadType[] = "apt";
+
+const char kOpusCodecName[] = "opus";
+const char kIsacCodecName[] = "ISAC";
+const char kL16CodecName[] = "L16";
+const char kG722CodecName[] = "G722";
+const char kIlbcCodecName[] = "ILBC";
+const char kPcmuCodecName[] = "PCMU";
+const char kPcmaCodecName[] = "PCMA";
+const char kCnCodecName[] = "CN";
+const char kDtmfCodecName[] = "telephone-event";
+
+// draft-spittka-payload-rtp-opus-03.txt
+const char kCodecParamPTime[] = "ptime";
+const char kCodecParamMaxPTime[] = "maxptime";
+const char kCodecParamMinPTime[] = "minptime";
+const char kCodecParamSPropStereo[] = "sprop-stereo";
+const char kCodecParamStereo[] = "stereo";
+const char kCodecParamUseInbandFec[] = "useinbandfec";
+const char kCodecParamUseDtx[] = "usedtx";
+const char kCodecParamMaxAverageBitrate[] = "maxaveragebitrate";
+const char kCodecParamMaxPlaybackRate[] = "maxplaybackrate";
+
+const char kCodecParamSctpProtocol[] = "protocol";
+const char kCodecParamSctpStreams[] = "streams";
+
+const char kParamValueTrue[] = "1";
+const char kParamValueEmpty[] = "";
+
+const int kOpusDefaultMaxPTime = 120;
+const int kOpusDefaultPTime = 20;
+const int kOpusDefaultMinPTime = 3;
+const int kOpusDefaultSPropStereo = 0;
+const int kOpusDefaultStereo = 0;
+const int kOpusDefaultUseInbandFec = 0;
+const int kOpusDefaultUseDtx = 0;
+const int kOpusDefaultMaxPlaybackRate = 48000;
+
+const int kPreferredMaxPTime = 120;
+const int kPreferredMinPTime = 10;
+const int kPreferredSPropStereo = 0;
+const int kPreferredStereo = 0;
+const int kPreferredUseInbandFec = 0;
+
+const char kRtcpFbParamNack[] = "nack";
+const char kRtcpFbNackParamPli[] = "pli";
+const char kRtcpFbParamRemb[] = "goog-remb";
+const char kRtcpFbParamTransportCc[] = "transport-cc";
+
+const char kRtcpFbParamCcm[] = "ccm";
+const char kRtcpFbCcmParamFir[] = "fir";
+const char kCodecParamMaxBitrate[] = "x-google-max-bitrate";
+const char kCodecParamMinBitrate[] = "x-google-min-bitrate";
+const char kCodecParamStartBitrate[] = "x-google-start-bitrate";
+const char kCodecParamMaxQuantization[] = "x-google-max-quantization";
+const char kCodecParamPort[] = "x-google-port";
+
+const int kGoogleRtpDataCodecPlType = 109;
+const char kGoogleRtpDataCodecName[] = "google-data";
+
+const int kGoogleSctpDataCodecPlType = 108;
+const char kGoogleSctpDataCodecName[] = "google-sctp-data";
+
+const char kComfortNoiseCodecName[] = "CN";
+
+const char kVp8CodecName[] = "VP8";
+const char kVp9CodecName[] = "VP9";
+const char kH264CodecName[] = "H264";
+
+// RFC 6184 RTP Payload Format for H.264 video
+const char kH264FmtpProfileLevelId[] = "profile-level-id";
+const char kH264FmtpLevelAsymmetryAllowed[] = "level-asymmetry-allowed";
+const char kH264FmtpPacketizationMode[] = "packetization-mode";
+const char kH264FmtpSpropParameterSets[] = "sprop-parameter-sets";
+const char kH264ProfileLevelConstrainedBaseline[] = "42e01f";
+
+const int kDefaultVideoMaxFramerate = 60;
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaconstants.h b/third_party/libwebrtc/webrtc/media/base/mediaconstants.h
new file mode 100644
index 0000000000..106fad0cf4
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaconstants.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIACONSTANTS_H_
+#define MEDIA_BASE_MEDIACONSTANTS_H_
+
+#include <string>
+
+// This file contains constants related to media.
+
+namespace cricket {
+
+extern const int kVideoCodecClockrate;
+extern const int kDataCodecClockrate;
+extern const int kDataMaxBandwidth; // bps
+
+// Default CPU thresholds.
+extern const float kHighSystemCpuThreshold;
+extern const float kLowSystemCpuThreshold;
+extern const float kProcessCpuThreshold;
+
+// Codec names as used in SDP.
+extern const char kRtxCodecName[];
+extern const char kRedCodecName[];
+extern const char kUlpfecCodecName[];
+extern const char kFlexfecCodecName[];
+
+extern const char kFlexfecFmtpRepairWindow[];
+
+// Codec parameters
+extern const char kCodecParamAssociatedPayloadType[];
+
+extern const char kOpusCodecName[];
+extern const char kIsacCodecName[];
+extern const char kL16CodecName[];
+extern const char kG722CodecName[];
+extern const char kIlbcCodecName[];
+extern const char kPcmuCodecName[];
+extern const char kPcmaCodecName[];
+extern const char kCnCodecName[];
+extern const char kDtmfCodecName[];
+
+// Attribute parameters
+extern const char kCodecParamPTime[];
+extern const char kCodecParamMaxPTime[];
+// fmtp parameters
+extern const char kCodecParamMinPTime[];
+extern const char kCodecParamSPropStereo[];
+extern const char kCodecParamStereo[];
+extern const char kCodecParamUseInbandFec[];
+extern const char kCodecParamUseDtx[];
+extern const char kCodecParamMaxAverageBitrate[];
+extern const char kCodecParamMaxPlaybackRate[];
+extern const char kCodecParamSctpProtocol[];
+extern const char kCodecParamSctpStreams[];
+
+extern const char kParamValueTrue[];
+// Parameters are stored as parameter/value pairs. For parameters who do not
+// have a value, |kParamValueEmpty| should be used as value.
+extern const char kParamValueEmpty[];
+
+// opus parameters.
+// Default value for maxptime according to
+// http://tools.ietf.org/html/draft-spittka-payload-rtp-opus-03
+extern const int kOpusDefaultMaxPTime;
+extern const int kOpusDefaultPTime;
+extern const int kOpusDefaultMinPTime;
+extern const int kOpusDefaultSPropStereo;
+extern const int kOpusDefaultStereo;
+extern const int kOpusDefaultUseInbandFec;
+extern const int kOpusDefaultUseDtx;
+extern const int kOpusDefaultMaxPlaybackRate;
+
+// Preferred values in this code base. Note that they may differ from the
+// default values in
+// http://tools.ietf.org/html/draft-spittka-payload-rtp-opus-03
+// Only frames larger or equal to 10 ms are currently supported in this code
+// base.
+extern const int kPreferredMaxPTime;
+extern const int kPreferredMinPTime;
+extern const int kPreferredSPropStereo;
+extern const int kPreferredStereo;
+extern const int kPreferredUseInbandFec;
+
+// rtcp-fb messages according to RFC 4585
+extern const char kRtcpFbParamNack[];
+extern const char kRtcpFbNackParamPli[];
+// rtcp-fb messages according to
+// http://tools.ietf.org/html/draft-alvestrand-rmcat-remb-00
+extern const char kRtcpFbParamRemb[];
+// rtcp-fb messages according to
+// https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01
+extern const char kRtcpFbParamTransportCc[];
+// ccm submessages according to RFC 5104
+extern const char kRtcpFbParamCcm[];
+extern const char kRtcpFbCcmParamFir[];
+// Google specific parameters
+extern const char kCodecParamMaxBitrate[];
+extern const char kCodecParamMinBitrate[];
+extern const char kCodecParamStartBitrate[];
+extern const char kCodecParamMaxQuantization[];
+extern const char kCodecParamPort[];
+
+// We put the data codec names here so callers of DataEngine::CreateChannel
+// don't have to import rtpdataengine.h to get the codec names they want to
+// pass in.
+extern const int kGoogleRtpDataCodecPlType;
+extern const char kGoogleRtpDataCodecName[];
+
+// TODO(pthatcher): Find an id that won't conflict with anything. On
+// the other hand, it really shouldn't matter since the id won't be
+// used on the wire.
+extern const int kGoogleSctpDataCodecPlType;
+extern const char kGoogleSctpDataCodecName[];
+
+extern const char kComfortNoiseCodecName[];
+
+extern const char kVp8CodecName[];
+extern const char kVp9CodecName[];
+extern const char kH264CodecName[];
+
+// RFC 6184 RTP Payload Format for H.264 video
+extern const char kH264FmtpProfileLevelId[];
+extern const char kH264FmtpLevelAsymmetryAllowed[];
+extern const char kH264FmtpPacketizationMode[];
+extern const char kH264FmtpSpropParameterSets[];
+extern const char kH264ProfileLevelConstrainedBaseline[];
+
+extern const int kDefaultVideoMaxFramerate;
+} // namespace cricket
+
+#endif // MEDIA_BASE_MEDIACONSTANTS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaengine.cc b/third_party/libwebrtc/webrtc/media/base/mediaengine.cc
new file mode 100644
index 0000000000..281ddbb76f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaengine.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/mediaengine.h"
+
+#if !defined(DISABLE_MEDIA_ENGINE_FACTORY)
+
+namespace cricket {
+
+// Pointer to the user-supplied engine creation function; NULL means the
+// compiled system default is used (see mediaengine.h).
+MediaEngineFactory::MediaEngineCreateFunction
+ MediaEngineFactory::create_function_ = NULL;
+
+// Installs |function| as the engine factory and returns the previously
+// installed function (NULL if none was set).
+MediaEngineFactory::MediaEngineCreateFunction
+ MediaEngineFactory::SetCreateFunction(MediaEngineCreateFunction function) {
+ MediaEngineCreateFunction old_function = create_function_;
+ create_function_ = function;
+ return old_function;
+}
+
+}; // namespace cricket
+
+#endif // DISABLE_MEDIA_ENGINE_FACTORY
+
+namespace cricket {
+
+// Returns RtpParameters with a single default-constructed encoding entry.
+webrtc::RtpParameters CreateRtpParametersWithOneEncoding() {
+ webrtc::RtpParameters parameters;
+ webrtc::RtpEncodingParameters encoding;
+ parameters.encodings.push_back(encoding);
+ return parameters;
+}
+
+}; // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaengine.h b/third_party/libwebrtc/webrtc/media/base/mediaengine.h
new file mode 100644
index 0000000000..483a96b7e8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaengine.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIAENGINE_H_
+#define MEDIA_BASE_MEDIAENGINE_H_
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreAudio/CoreAudio.h>
+#endif
+
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/rtpparameters.h"
+#include "call/audio_state.h"
+#include "media/base/codec.h"
+#include "media/base/mediachannel.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/platform_file.h"
+
+#if defined(GOOGLE_CHROME_BUILD) || defined(CHROMIUM_BUILD)
+#define DISABLE_MEDIA_ENGINE_FACTORY
+#endif
+
+namespace webrtc {
+class AudioDeviceModule;
+class AudioMixer;
+class AudioProcessing;
+class Call;
+}
+
+namespace cricket {
+
+// The RTP header extensions a media engine supports.
+struct RtpCapabilities {
+ std::vector<webrtc::RtpExtension> header_extensions;
+};
+
+// MediaEngineInterface is an abstraction of a media engine which can be
+// subclassed to support different media componentry backends.
+// It supports voice and video operations in the same class to facilitate
+// proper synchronization between both media types.
+class MediaEngineInterface {
+ public:
+ virtual ~MediaEngineInterface() {}
+
+ // Initialization
+ // Starts the engine.
+ virtual bool Init() = 0;
+ // TODO(solenberg): Remove once VoE API refactoring is done.
+ virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const = 0;
+
+ // MediaChannel creation
+ // Creates a voice media channel. Returns NULL on failure.
+ virtual VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const AudioOptions& options) = 0;
+ // Creates a video media channel, paired with the specified voice channel.
+ // Returns NULL on failure.
+ virtual VideoMediaChannel* CreateVideoChannel(
+ webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) = 0;
+
+ // Gets the current microphone level, as a value between 0 and 10.
+ virtual int GetInputLevel() = 0;
+
+ // Supported codecs and RTP capabilities for each media type.
+ virtual const std::vector<AudioCodec>& audio_send_codecs() = 0;
+ virtual const std::vector<AudioCodec>& audio_recv_codecs() = 0;
+ virtual RtpCapabilities GetAudioCapabilities() = 0;
+ virtual std::vector<VideoCodec> video_codecs() = 0;
+ virtual RtpCapabilities GetVideoCapabilities() = 0;
+
+ // Starts AEC dump using existing file, a maximum file size in bytes can be
+ // specified. Logging is stopped just before the size limit is exceeded.
+ // If max_size_bytes is set to a value <= 0, no limit will be used.
+ virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) = 0;
+
+ // Stops recording AEC dump.
+ virtual void StopAecDump() = 0;
+};
+
+
+#if !defined(DISABLE_MEDIA_ENGINE_FACTORY)
+// Global factory for media engines; allows embedders to override the
+// compiled default by installing a creation function.
+class MediaEngineFactory {
+ public:
+ typedef cricket::MediaEngineInterface* (*MediaEngineCreateFunction)();
+ // Creates a media engine, using either the compiled system default or the
+ // creation function specified in SetCreateFunction, if specified.
+ static MediaEngineInterface* Create();
+ // Sets the function used when calling Create. If unset, the compiled system
+ // default will be used. Returns the old create function, or NULL if one
+ // wasn't set. Likewise, NULL can be used as the |function| parameter to
+ // reset to the default behavior.
+ static MediaEngineCreateFunction SetCreateFunction(
+ MediaEngineCreateFunction function);
+ private:
+ static MediaEngineCreateFunction create_function_;
+};
+#endif
+
+// CompositeMediaEngine constructs a MediaEngine from separate
+// voice and video engine classes. The engines are stored in a std::pair and
+// constructed piecewise from the two argument tuples.
+template <class VOICE, class VIDEO>
+class CompositeMediaEngine : public MediaEngineInterface {
+ public:
+ template <class... Args1, class... Args2>
+ CompositeMediaEngine(std::tuple<Args1...> first_args,
+ std::tuple<Args2...> second_args)
+ : engines_(std::piecewise_construct,
+ std::move(first_args),
+ std::move(second_args)) {}
+
+ virtual ~CompositeMediaEngine() {}
+ // Initializes the voice engine; the video engine needs no Init() here.
+ virtual bool Init() {
+ voice().Init();
+ return true;
+ }
+
+ virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+ return voice().GetAudioState();
+ }
+ // Channel creation is delegated to the wrapped voice/video engines.
+ virtual VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const AudioOptions& options) {
+ return voice().CreateChannel(call, config, options);
+ }
+ virtual VideoMediaChannel* CreateVideoChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) {
+ return video().CreateChannel(call, config, options);
+ }
+
+ virtual int GetInputLevel() { return voice().GetInputLevel(); }
+ virtual const std::vector<AudioCodec>& audio_send_codecs() {
+ return voice().send_codecs();
+ }
+ virtual const std::vector<AudioCodec>& audio_recv_codecs() {
+ return voice().recv_codecs();
+ }
+ virtual RtpCapabilities GetAudioCapabilities() {
+ return voice().GetCapabilities();
+ }
+ virtual std::vector<VideoCodec> video_codecs() { return video().codecs(); }
+ virtual RtpCapabilities GetVideoCapabilities() {
+ return video().GetCapabilities();
+ }
+
+ virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) {
+ return voice().StartAecDump(file, max_size_bytes);
+ }
+
+ virtual void StopAecDump() { voice().StopAecDump(); }
+
+ protected:
+ // Accessors for the wrapped engines.
+ VOICE& voice() { return engines_.first; }
+ VIDEO& video() { return engines_.second; }
+ const VOICE& voice() const { return engines_.first; }
+ const VIDEO& video() const { return engines_.second; }
+
+ private:
+ std::pair<VOICE, VIDEO> engines_;
+};
+
+// Transport used for data channels: none, RTP, or SCTP.
+enum DataChannelType { DCT_NONE = 0, DCT_RTP = 1, DCT_SCTP = 2 };
+
+// Abstract factory for data media channels.
+class DataEngineInterface {
+ public:
+ virtual ~DataEngineInterface() {}
+ virtual DataMediaChannel* CreateChannel(const MediaConfig& config) = 0;
+ virtual const std::vector<DataCodec>& data_codecs() = 0;
+};
+
+// Returns RtpParameters with a single default-constructed encoding entry.
+webrtc::RtpParameters CreateRtpParametersWithOneEncoding();
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_MEDIAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc
new file mode 100644
index 0000000000..7cb5fa8585
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/rtpdataengine.h"
+
+#include <map>
+
+#include "media/base/codec.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/rtputils.h"
+#include "media/base/streamparams.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ratelimiter.h"
+#include "rtc_base/sanitizer.h"
+#include "rtc_base/stringutils.h"
+
+namespace cricket {
+
+// We want to avoid IP fragmentation.
+static const size_t kDataMaxRtpPacketLen = 1200U;
+// We reserve space after the RTP header for future wiggle room.
+static const unsigned char kReservedSpace[] = {
+ 0x00, 0x00, 0x00, 0x00
+};
+
+// Amount of overhead SRTP may take. We need to leave room in the
+// buffer for it, otherwise SRTP will fail later. If SRTP ever uses
+// more than this, we need to increase this number.
+static const size_t kMaxSrtpHmacOverhead = 16;
+
+// Registers the single supported data codec ("google-data") under its fixed
+// payload type; data_codecs() exposes this list to callers.
+RtpDataEngine::RtpDataEngine() {
+  data_codecs_.push_back(
+      DataCodec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName));
+}
+
+// Creates a new RTP data channel.  The caller takes ownership of the
+// returned raw pointer.
+DataMediaChannel* RtpDataEngine::CreateChannel(
+    const MediaConfig& config) {
+  return new RtpDataMediaChannel(config);
+}
+
+// Case-insensitive lookup of a codec by name in |codecs|; returns nullptr
+// when no codec with that name is present.
+static const DataCodec* FindCodecByName(const std::vector<DataCodec>& codecs,
+                                        const std::string& name) {
+  for (const DataCodec& codec : codecs) {
+    if (_stricmp(name.c_str(), codec.name.c_str()) == 0)
+      return &codec;
+  }
+  return nullptr;
+}
+
+RtpDataMediaChannel::RtpDataMediaChannel(const MediaConfig& config)
+    : DataMediaChannel(config) {
+  Construct();
+}
+
+// Shared initialization: start in the non-sending / non-receiving state and
+// install a rate limiter capped at the default maximum data bandwidth
+// (kDataMaxBandwidth is in bits/s; the limiter counts bytes, hence / 8).
+void RtpDataMediaChannel::Construct() {
+  sending_ = false;
+  receiving_ = false;
+  send_limiter_.reset(new rtc::RateLimiter(kDataMaxBandwidth / 8, 1.0));
+}
+
+
+// The per-send-SSRC RtpClock objects are owned raw pointers stored in a map
+// (see AddSendStream); delete them all here.
+RtpDataMediaChannel::~RtpDataMediaChannel() {
+  std::map<uint32_t, RtpClock*>::const_iterator iter;
+  for (iter = rtp_clock_by_send_ssrc_.begin();
+       iter != rtp_clock_by_send_ssrc_.end();
+       ++iter) {
+    delete iter->second;
+  }
+}
+
+// Advances the stream clock: increments the sequence number and derives the
+// RTP timestamp from the wall-clock time in seconds times the clock rate,
+// offset by the random starting timestamp.
+void RTC_NO_SANITIZE("float-cast-overflow")  // bugs.webrtc.org/8204
+RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) {
+  *seq_num = ++last_seq_num_;
+  *timestamp = timestamp_offset_ + static_cast<uint32_t>(now * clockrate_);
+  // UBSan: 5.92374e+10 is outside the range of representable values of type
+  // 'unsigned int'
+}
+
+// Returns the first codec in |codecs| that does NOT match the canonical
+// google-data codec, or NULL when every codec is known.
+const DataCodec* FindUnknownCodec(const std::vector<DataCodec>& codecs) {
+  DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
+  std::vector<DataCodec>::const_iterator iter;
+  for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+    if (!iter->Matches(data_codec)) {
+      return &(*iter);
+    }
+  }
+  return NULL;
+}
+
+// Returns the first codec in |codecs| that matches the canonical google-data
+// codec, or NULL when none does.
+const DataCodec* FindKnownCodec(const std::vector<DataCodec>& codecs) {
+  DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
+  std::vector<DataCodec>::const_iterator iter;
+  for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+    if (iter->Matches(data_codec)) {
+      return &(*iter);
+    }
+  }
+  return NULL;
+}
+
+// Receive codecs must ALL be known: a single unknown codec rejects the whole
+// set (we could not demux packets for a codec we do not understand).
+bool RtpDataMediaChannel::SetRecvCodecs(const std::vector<DataCodec>& codecs) {
+  const DataCodec* unknown_codec = FindUnknownCodec(codecs);
+  if (unknown_codec) {
+    RTC_LOG(LS_WARNING) << "Failed to SetRecvCodecs because of unknown codec: "
+                        << unknown_codec->ToString();
+    return false;
+  }
+
+  recv_codecs_ = codecs;
+  return true;
+}
+
+// Send codecs need only CONTAIN at least one known codec; extra unknown
+// entries are tolerated (SendData later picks the known one by name).
+bool RtpDataMediaChannel::SetSendCodecs(const std::vector<DataCodec>& codecs) {
+  const DataCodec* known_codec = FindKnownCodec(codecs);
+  if (!known_codec) {
+    RTC_LOG(LS_WARNING)
+        << "Failed to SetSendCodecs because there is no known codec.";
+    return false;
+  }
+
+  send_codecs_ = codecs;
+  return true;
+}
+
+// Applies send parameters: codecs first, then the bandwidth cap.  Note the
+// short-circuit: if SetSendCodecs fails, the bandwidth is left unchanged.
+bool RtpDataMediaChannel::SetSendParameters(const DataSendParameters& params) {
+  return (SetSendCodecs(params.codecs) &&
+          SetMaxSendBandwidth(params.max_bandwidth_bps));
+}
+
+// Receive parameters only carry codecs for this channel type.
+bool RtpDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) {
+  return SetRecvCodecs(params.codecs);
+}
+
+// Registers a new outgoing stream.  Fails when the stream has no SSRC or a
+// stream with the same primary SSRC already exists.  Each stream gets its own
+// RtpClock seeded with random nonzero sequence number and timestamp offset.
+bool RtpDataMediaChannel::AddSendStream(const StreamParams& stream) {
+  if (!stream.has_ssrcs()) {
+    return false;
+  }
+
+  if (GetStreamBySsrc(send_streams_, stream.first_ssrc())) {
+    RTC_LOG(LS_WARNING) << "Not adding data send stream '" << stream.id
+                        << "' with ssrc=" << stream.first_ssrc()
+                        << " because stream already exists.";
+    return false;
+  }
+
+  send_streams_.push_back(stream);
+  // TODO(pthatcher): This should be per-stream, not per-ssrc.
+  // And we should probably allow more than one per stream.
+  rtp_clock_by_send_ssrc_[stream.first_ssrc()] = new RtpClock(
+      kDataCodecClockrate,
+      rtc::CreateRandomNonZeroId(), rtc::CreateRandomNonZeroId());
+
+  RTC_LOG(LS_INFO) << "Added data send stream '" << stream.id
+                   << "' with ssrc=" << stream.first_ssrc();
+  return true;
+}
+
+// Unregisters a send stream and frees its RtpClock.  Returns false when no
+// stream with |ssrc| is known.
+bool RtpDataMediaChannel::RemoveSendStream(uint32_t ssrc) {
+  if (!GetStreamBySsrc(send_streams_, ssrc)) {
+    return false;
+  }
+
+  RemoveStreamBySsrc(&send_streams_, ssrc);
+  delete rtp_clock_by_send_ssrc_[ssrc];
+  rtp_clock_by_send_ssrc_.erase(ssrc);
+  return true;
+}
+
+// Registers a new incoming stream.  Fails when the stream has no SSRC or a
+// stream with the same primary SSRC was already added.
+bool RtpDataMediaChannel::AddRecvStream(const StreamParams& stream) {
+  if (!stream.has_ssrcs()) {
+    return false;
+  }
+
+  if (GetStreamBySsrc(recv_streams_, stream.first_ssrc())) {
+    RTC_LOG(LS_WARNING) << "Not adding data recv stream '" << stream.id
+                        << "' with ssrc=" << stream.first_ssrc()
+                        << " because stream already exists.";
+    return false;
+  }
+
+  recv_streams_.push_back(stream);
+  RTC_LOG(LS_INFO) << "Added data recv stream '" << stream.id
+                   << "' with ssrc=" << stream.first_ssrc();
+  return true;
+}
+
+// Removal is best-effort: always reports success, even when |ssrc| was not a
+// registered receive stream.
+bool RtpDataMediaChannel::RemoveRecvStream(uint32_t ssrc) {
+  RemoveStreamBySsrc(&recv_streams_, ssrc);
+  return true;
+}
+
+// Handles an incoming RTP packet: parses the header, validates channel state,
+// codec, and stream, strips the header plus the reserved wiggle-room bytes,
+// and signals the remaining payload to observers.  Invalid or unexpected
+// packets are dropped silently or with a warning.
+void RtpDataMediaChannel::OnPacketReceived(
+    rtc::CopyOnWriteBuffer* packet, const rtc::PacketTime& packet_time) {
+  RtpHeader header;
+  if (!GetRtpHeader(packet->cdata(), packet->size(), &header)) {
+    // Don't want to log for every corrupt packet.
+    // RTC_LOG(LS_WARNING) << "Could not read rtp header from packet of length "
+    //                     << packet->length() << ".";
+    return;
+  }
+
+  size_t header_length;
+  if (!GetRtpHeaderLen(packet->cdata(), packet->size(), &header_length)) {
+    // Don't want to log for every corrupt packet.
+    // RTC_LOG(LS_WARNING) << "Could not read rtp header"
+    //                     << length from packet of length "
+    //                     << packet->length() << ".";
+    return;
+  }
+  // Payload begins after the (possibly extended) RTP header and the reserved
+  // padding that SendData prepends (see kReservedSpace).
+  const char* data =
+      packet->cdata<char>() + header_length + sizeof(kReservedSpace);
+  size_t data_len = packet->size() - header_length - sizeof(kReservedSpace);
+
+  if (!receiving_) {
+    RTC_LOG(LS_WARNING) << "Not receiving packet " << header.ssrc << ":"
+                        << header.seq_num << " before SetReceive(true) called.";
+    return;
+  }
+
+  if (!FindCodecById(recv_codecs_, header.payload_type)) {
+    // For bundling, this will be logged for every message.
+    // So disable this logging.
+    // RTC_LOG(LS_WARNING) << "Not receiving packet "
+    //                     << header.ssrc << ":" << header.seq_num
+    //                     << " (" << data_len << ")"
+    //                     << " because unknown payload id: " << header.payload_type;
+    return;
+  }
+
+  if (!GetStreamBySsrc(recv_streams_, header.ssrc)) {
+    RTC_LOG(LS_WARNING) << "Received packet for unknown ssrc: " << header.ssrc;
+    return;
+  }
+
+  // Uncomment this for easy debugging.
+  // const auto* found_stream = GetStreamBySsrc(recv_streams_, header.ssrc);
+  // RTC_LOG(LS_INFO) << "Received packet"
+  //                  << " groupid=" << found_stream.groupid
+  //                  << ", ssrc=" << header.ssrc
+  //                  << ", seqnum=" << header.seq_num
+  //                  << ", timestamp=" << header.timestamp
+  //                  << ", len=" << data_len;
+
+  ReceiveDataParams params;
+  params.ssrc = header.ssrc;
+  params.seq_num = header.seq_num;
+  params.timestamp = header.timestamp;
+  SignalDataReceived(params, data, data_len);
+}
+
+// Replaces the send rate limiter.  Non-positive |bps| means "unlimited" and
+// falls back to the default cap; the limiter itself counts bytes (bps / 8)
+// over a 1-second period.
+bool RtpDataMediaChannel::SetMaxSendBandwidth(int bps) {
+  if (bps <= 0) {
+    bps = kDataMaxBandwidth;
+  }
+  send_limiter_.reset(new rtc::RateLimiter(bps / 8, 1.0));
+  RTC_LOG(LS_INFO) << "RtpDataMediaChannel::SetSendBandwidth to " << bps
+                   << "bps.";
+  return true;
+}
+
+// Sends one text message as a single RTP packet.  Returns false (leaving
+// |result|, when provided, at SDR_ERROR) if: the channel is not sending, the
+// message is not DMT_TEXT, the SSRC or codec is unknown, the packet would
+// exceed the single-packet size budget, or the rate limiter rejects it.
+bool RtpDataMediaChannel::SendData(
+    const SendDataParams& params,
+    const rtc::CopyOnWriteBuffer& payload,
+    SendDataResult* result) {
+  if (result) {
+    // If we return true, we'll set this to SDR_SUCCESS.
+    *result = SDR_ERROR;
+  }
+  if (!sending_) {
+    RTC_LOG(LS_WARNING) << "Not sending packet with ssrc=" << params.ssrc
+                        << " len=" << payload.size()
+                        << " before SetSend(true).";
+    return false;
+  }
+
+  // Only text messages are supported over RTP data channels.
+  if (params.type != cricket::DMT_TEXT) {
+    RTC_LOG(LS_WARNING)
+        << "Not sending data because binary type is unsupported.";
+    return false;
+  }
+
+  const StreamParams* found_stream =
+      GetStreamBySsrc(send_streams_, params.ssrc);
+  if (!found_stream) {
+    RTC_LOG(LS_WARNING) << "Not sending data because ssrc is unknown: "
+                        << params.ssrc;
+    return false;
+  }
+
+  const DataCodec* found_codec =
+      FindCodecByName(send_codecs_, kGoogleRtpDataCodecName);
+  if (!found_codec) {
+    RTC_LOG(LS_WARNING) << "Not sending data because codec is unknown: "
+                        << kGoogleRtpDataCodecName;
+    return false;
+  }
+
+  // Budget: RTP header + reserved padding + payload + room for the SRTP HMAC
+  // must fit in one packet to avoid IP fragmentation.
+  size_t packet_len = (kMinRtpPacketLen + sizeof(kReservedSpace) +
+                       payload.size() + kMaxSrtpHmacOverhead);
+  if (packet_len > kDataMaxRtpPacketLen) {
+    return false;
+  }
+
+  // Current time in (fractional) seconds; feeds both the rate limiter and
+  // the RTP clock below.
+  double now =
+      rtc::TimeMicros() / static_cast<double>(rtc::kNumMicrosecsPerSec);
+
+  if (!send_limiter_->CanUse(packet_len, now)) {
+    RTC_LOG(LS_VERBOSE) << "Dropped data packet of len=" << packet_len
+                        << "; already sent " << send_limiter_->used_in_period()
+                        << "/" << send_limiter_->max_per_period();
+    return false;
+  }
+
+  RtpHeader header;
+  header.payload_type = found_codec->id;
+  header.ssrc = params.ssrc;
+  rtp_clock_by_send_ssrc_[header.ssrc]->Tick(
+      now, &header.seq_num, &header.timestamp);
+
+  // Assemble: fixed RTP header, then reserved padding, then the payload.
+  rtc::CopyOnWriteBuffer packet(kMinRtpPacketLen, packet_len);
+  if (!SetRtpHeader(packet.data(), packet.size(), header)) {
+    return false;
+  }
+  packet.AppendData(kReservedSpace);
+  packet.AppendData(payload);
+
+  RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: "
+                      << " stream=" << found_stream->id
+                      << " ssrc=" << header.ssrc
+                      << ", seqnum=" << header.seq_num
+                      << ", timestamp=" << header.timestamp
+                      << ", len=" << payload.size();
+
+  MediaChannel::SendPacket(&packet, rtc::PacketOptions());
+  // Charge the limiter only after a send was actually attempted.
+  send_limiter_->Use(packet_len, now);
+  if (result) {
+    *result = SDR_SUCCESS;
+  }
+  return true;
+}
+
+// Data packets are marked AF41 (assured forwarding) for DiffServ QoS.
+rtc::DiffServCodePoint RtpDataMediaChannel::PreferredDscp() const {
+  return rtc::DSCP_AF41;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h
new file mode 100644
index 0000000000..64e083b0fd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_RTPDATAENGINE_H_
+#define MEDIA_BASE_RTPDATAENGINE_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/mediaengine.h"
+
+namespace cricket {
+
+struct DataCodec;
+
+// Data engine that transports data messages over plain RTP packets.
+// Supports exactly one codec: the canonical "google-data" codec.
+class RtpDataEngine : public DataEngineInterface {
+ public:
+  RtpDataEngine();
+
+  // Caller takes ownership of the returned channel.
+  virtual DataMediaChannel* CreateChannel(const MediaConfig& config);
+
+  virtual const std::vector<DataCodec>& data_codecs() {
+    return data_codecs_;
+  }
+
+ private:
+  // Populated once in the constructor; never mutated afterwards.
+  std::vector<DataCodec> data_codecs_;
+};
+
+// Keep track of sequence number and timestamp of an RTP stream. The
+// sequence number starts with a "random" value and increments. The
+// timestamp starts with a "random" value and increases monotonically
+// according to the clockrate.
+class RtpClock {
+ public:
+  RtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset)
+      : clockrate_(clockrate),
+        last_seq_num_(first_seq_num),
+        timestamp_offset_(timestamp_offset) {}
+
+  // Given the current time (in number of seconds which must be
+  // monotonically increasing), Return the next sequence number and
+  // timestamp.
+  void Tick(double now, int* seq_num, uint32_t* timestamp);
+
+ private:
+  int clockrate_;            // RTP timestamp units per second.
+  uint16_t last_seq_num_;    // Last sequence number handed out.
+  uint32_t timestamp_offset_;  // Random starting timestamp.
+};
+
+// Media channel that sends/receives data messages as RTP packets.  Sending
+// is rate-limited; only text messages and the "google-data" codec are
+// supported.  Not thread-safe; presumably used from a single thread -- TODO
+// confirm against callers.
+class RtpDataMediaChannel : public DataMediaChannel {
+ public:
+  explicit RtpDataMediaChannel(const MediaConfig& config);
+  virtual ~RtpDataMediaChannel();
+
+  virtual bool SetSendParameters(const DataSendParameters& params);
+  virtual bool SetRecvParameters(const DataRecvParameters& params);
+  virtual bool AddSendStream(const StreamParams& sp);
+  virtual bool RemoveSendStream(uint32_t ssrc);
+  virtual bool AddRecvStream(const StreamParams& sp);
+  virtual bool RemoveRecvStream(uint32_t ssrc);
+  // Toggles whether SendData()/OnPacketReceived() are allowed to do work.
+  virtual bool SetSend(bool send) {
+    sending_ = send;
+    return true;
+  }
+  virtual bool SetReceive(bool receive) {
+    receiving_ = receive;
+    return true;
+  }
+  virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                                const rtc::PacketTime& packet_time);
+  // RTCP and readiness notifications are intentionally ignored.
+  virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketTime& packet_time) {}
+  virtual void OnReadyToSend(bool ready) {}
+  virtual bool SendData(
+      const SendDataParams& params,
+      const rtc::CopyOnWriteBuffer& payload,
+      SendDataResult* result);
+  virtual rtc::DiffServCodePoint PreferredDscp() const;
+
+ private:
+  void Construct();
+  bool SetMaxSendBandwidth(int bps);
+  bool SetSendCodecs(const std::vector<DataCodec>& codecs);
+  bool SetRecvCodecs(const std::vector<DataCodec>& codecs);
+
+  bool sending_;
+  bool receiving_;
+  std::vector<DataCodec> send_codecs_;
+  std::vector<DataCodec> recv_codecs_;
+  std::vector<StreamParams> send_streams_;
+  std::vector<StreamParams> recv_streams_;
+  // Owned raw pointers, keyed by send SSRC; freed in the destructor.
+  std::map<uint32_t, RtpClock*> rtp_clock_by_send_ssrc_;
+  std::unique_ptr<rtc::RateLimiter> send_limiter_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_RTPDATAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc b/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc
new file mode 100644
index 0000000000..a05c3de264
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc
@@ -0,0 +1,377 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+
+#include "media/base/fakenetworkinterface.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/rtpdataengine.h"
+#include "media/base/rtputils.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ssladapter.h"
+
+// Test helper: records the most recent data delivery from the channel's
+// SignalDataReceived signal so tests can inspect it.
+class FakeDataReceiver : public sigslot::has_slots<> {
+ public:
+  FakeDataReceiver() : has_received_data_(false) {}
+
+  // Slot connected to RtpDataMediaChannel::SignalDataReceived.
+  void OnDataReceived(
+      const cricket::ReceiveDataParams& params,
+      const char* data, size_t len) {
+    has_received_data_ = true;
+    last_received_data_ = std::string(data, len);
+    last_received_data_len_ = len;
+    last_received_data_params_ = params;
+  }
+
+  bool has_received_data() const { return has_received_data_; }
+  std::string last_received_data() const { return last_received_data_; }
+  size_t last_received_data_len() const { return last_received_data_len_; }
+  cricket::ReceiveDataParams last_received_data_params() const {
+    return last_received_data_params_;
+  }
+
+ private:
+  bool has_received_data_;
+  std::string last_received_data_;
+  size_t last_received_data_len_;
+  cricket::ReceiveDataParams last_received_data_params_;
+};
+
+// Fixture wiring an RtpDataEngine to a fake network interface and a fake
+// data receiver, with a fake clock so tests control rate-limiter time.
+class RtpDataMediaChannelTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    // Seed needed for each test to satisfy expectations.
+    iface_.reset(new cricket::FakeNetworkInterface());
+    dme_.reset(CreateEngine());
+    receiver_.reset(new FakeDataReceiver());
+  }
+
+  // Moves the fake clock to |now| (seconds).
+  void SetNow(double now) {
+    clock_.SetTimeNanos(now * rtc::kNumNanosecsPerSec);
+  }
+
+  cricket::RtpDataEngine* CreateEngine() {
+    cricket::RtpDataEngine* dme = new cricket::RtpDataEngine();
+    return dme;
+  }
+
+  cricket::RtpDataMediaChannel* CreateChannel() {
+    return CreateChannel(dme_.get());
+  }
+
+  // Creates a channel bound to the fake network interface and connects its
+  // data signal to the fake receiver.  Caller owns the channel.
+  cricket::RtpDataMediaChannel* CreateChannel(cricket::RtpDataEngine* dme) {
+    cricket::MediaConfig config;
+    cricket::RtpDataMediaChannel* channel =
+        static_cast<cricket::RtpDataMediaChannel*>(dme->CreateChannel(config));
+    channel->SetInterface(iface_.get());
+    channel->SignalDataReceived.connect(
+        receiver_.get(), &FakeDataReceiver::OnDataReceived);
+    return channel;
+  }
+
+  FakeDataReceiver* receiver() {
+    return receiver_.get();
+  }
+
+  bool HasReceivedData() {
+    return receiver_->has_received_data();
+  }
+
+  std::string GetReceivedData() {
+    return receiver_->last_received_data();
+  }
+
+  size_t GetReceivedDataLen() {
+    return receiver_->last_received_data_len();
+  }
+
+  cricket::ReceiveDataParams GetReceivedDataParams() {
+    return receiver_->last_received_data_params();
+  }
+
+  // True when more than |count| RTP packets reached the fake network.
+  bool HasSentData(int count) {
+    return (iface_->NumRtpPackets() > count);
+  }
+
+  // Returns the payload of sent packet |index|, past a fixed 12-byte header.
+  std::string GetSentData(int index) {
+    // Assume RTP header of length 12
+    std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
+        iface_->GetRtpPacket(index));
+    if (packet->size() > 12) {
+      return std::string(packet->data<char>() + 12, packet->size() - 12);
+    } else {
+      return "";
+    }
+  }
+
+  // Parses and returns the RTP header of sent packet |index|.
+  cricket::RtpHeader GetSentDataHeader(int index) {
+    std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
+        iface_->GetRtpPacket(index));
+    cricket::RtpHeader header;
+    GetRtpHeader(packet->data(), packet->size(), &header);
+    return header;
+  }
+
+ private:
+  std::unique_ptr<cricket::RtpDataEngine> dme_;
+  // Fake clock: makes rtc::TimeMicros() deterministic for rate-limit tests.
+  rtc::ScopedFakeClock clock_;
+  std::unique_ptr<cricket::FakeNetworkInterface> iface_;
+  std::unique_ptr<FakeDataReceiver> receiver_;
+};
+
+// Send codec sets need only contain a known codec; recv codec sets must be
+// entirely known (a single unknown entry rejects the whole recv set).
+TEST_F(RtpDataMediaChannelTest, SetUnknownCodecs) {
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  cricket::DataCodec known_codec;
+  known_codec.id = 103;
+  known_codec.name = "google-data";
+  cricket::DataCodec unknown_codec;
+  unknown_codec.id = 104;
+  unknown_codec.name = "unknown-data";
+
+  cricket::DataSendParameters send_parameters_known;
+  send_parameters_known.codecs.push_back(known_codec);
+  cricket::DataRecvParameters recv_parameters_known;
+  recv_parameters_known.codecs.push_back(known_codec);
+
+  cricket::DataSendParameters send_parameters_unknown;
+  send_parameters_unknown.codecs.push_back(unknown_codec);
+  cricket::DataRecvParameters recv_parameters_unknown;
+  recv_parameters_unknown.codecs.push_back(unknown_codec);
+
+  cricket::DataSendParameters send_parameters_mixed;
+  send_parameters_mixed.codecs.push_back(known_codec);
+  send_parameters_mixed.codecs.push_back(unknown_codec);
+  cricket::DataRecvParameters recv_parameters_mixed;
+  recv_parameters_mixed.codecs.push_back(known_codec);
+  recv_parameters_mixed.codecs.push_back(unknown_codec);
+
+  EXPECT_TRUE(dmc->SetSendParameters(send_parameters_known));
+  EXPECT_FALSE(dmc->SetSendParameters(send_parameters_unknown));
+  EXPECT_TRUE(dmc->SetSendParameters(send_parameters_mixed));
+  EXPECT_TRUE(dmc->SetRecvParameters(recv_parameters_known));
+  EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_unknown));
+  EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_mixed));
+}
+
+// Send streams can be added once per SSRC and removing an unknown SSRC fails.
+TEST_F(RtpDataMediaChannelTest, AddRemoveSendStream) {
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  cricket::StreamParams stream1;
+  stream1.add_ssrc(41);
+  EXPECT_TRUE(dmc->AddSendStream(stream1));
+  cricket::StreamParams stream2;
+  stream2.add_ssrc(42);
+  EXPECT_TRUE(dmc->AddSendStream(stream2));
+
+  EXPECT_TRUE(dmc->RemoveSendStream(41));
+  EXPECT_TRUE(dmc->RemoveSendStream(42));
+  EXPECT_FALSE(dmc->RemoveSendStream(43));
+}
+
+// Duplicate recv streams are rejected; removal is best-effort (always true).
+TEST_F(RtpDataMediaChannelTest, AddRemoveRecvStream) {
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  cricket::StreamParams stream1;
+  stream1.add_ssrc(41);
+  EXPECT_TRUE(dmc->AddRecvStream(stream1));
+  cricket::StreamParams stream2;
+  stream2.add_ssrc(42);
+  EXPECT_TRUE(dmc->AddRecvStream(stream2));
+  EXPECT_FALSE(dmc->AddRecvStream(stream2));
+
+  EXPECT_TRUE(dmc->RemoveRecvStream(41));
+  EXPECT_TRUE(dmc->RemoveRecvStream(42));
+}
+
+// Walks SendData through its failure modes (not sending, unknown stream,
+// unknown codec, oversize payload), then verifies a successful send produces
+// the reserved-padding-prefixed payload and a sane RTP header, and that the
+// timestamp advances with the 90 kHz clock.
+TEST_F(RtpDataMediaChannelTest, SendData) {
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  cricket::SendDataParams params;
+  params.ssrc = 42;
+  unsigned char data[] = "food";
+  rtc::CopyOnWriteBuffer payload(data, 4);
+  // Expected wire payload: 4 reserved bytes followed by the message.
+  unsigned char padded_data[] = {
+    0x00, 0x00, 0x00, 0x00,
+    'f', 'o', 'o', 'd',
+  };
+  cricket::SendDataResult result;
+
+  // Not sending
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+  EXPECT_EQ(cricket::SDR_ERROR, result);
+  EXPECT_FALSE(HasSentData(0));
+  ASSERT_TRUE(dmc->SetSend(true));
+
+  // Unknown stream name.
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+  EXPECT_EQ(cricket::SDR_ERROR, result);
+  EXPECT_FALSE(HasSentData(0));
+
+  cricket::StreamParams stream;
+  stream.add_ssrc(42);
+  ASSERT_TRUE(dmc->AddSendStream(stream));
+
+  // Unknown codec;
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+  EXPECT_EQ(cricket::SDR_ERROR, result);
+  EXPECT_FALSE(HasSentData(0));
+
+  cricket::DataCodec codec;
+  codec.id = 103;
+  codec.name = cricket::kGoogleRtpDataCodecName;
+  cricket::DataSendParameters parameters;
+  parameters.codecs.push_back(codec);
+  ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+  // Length too large;
+  std::string x10000(10000, 'x');
+  EXPECT_FALSE(dmc->SendData(
+      params, rtc::CopyOnWriteBuffer(x10000.data(), x10000.length()), &result));
+  EXPECT_EQ(cricket::SDR_ERROR, result);
+  EXPECT_FALSE(HasSentData(0));
+
+  // Finally works!
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_EQ(cricket::SDR_SUCCESS, result);
+  ASSERT_TRUE(HasSentData(0));
+  EXPECT_EQ(sizeof(padded_data), GetSentData(0).length());
+  EXPECT_EQ(0, memcmp(
+      padded_data, GetSentData(0).data(), sizeof(padded_data)));
+  cricket::RtpHeader header0 = GetSentDataHeader(0);
+  EXPECT_NE(0, header0.seq_num);
+  EXPECT_NE(0U, header0.timestamp);
+  EXPECT_EQ(header0.ssrc, 42U);
+  EXPECT_EQ(header0.payload_type, 103);
+
+  // Should bump timestamp by 180000 because the clock rate is 90khz.
+  SetNow(2);
+
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  ASSERT_TRUE(HasSentData(1));
+  EXPECT_EQ(sizeof(padded_data), GetSentData(1).length());
+  EXPECT_EQ(0, memcmp(
+      padded_data, GetSentData(1).data(), sizeof(padded_data)));
+  cricket::RtpHeader header1 = GetSentDataHeader(1);
+  EXPECT_EQ(header1.ssrc, 42U);
+  EXPECT_EQ(header1.payload_type, 103);
+  EXPECT_EQ(static_cast<uint16_t>(header0.seq_num + 1),
+            static_cast<uint16_t>(header1.seq_num));
+  EXPECT_EQ(header0.timestamp + 180000, header1.timestamp);
+}
+
+// Exercises the send rate limiter: with a cap that admits exactly three
+// packets per one-second window, the fourth send in a window is rejected,
+// and advancing the fake clock into a new window admits packets again.
+TEST_F(RtpDataMediaChannelTest, SendDataRate) {
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  ASSERT_TRUE(dmc->SetSend(true));
+
+  cricket::DataCodec codec;
+  codec.id = 103;
+  codec.name = cricket::kGoogleRtpDataCodecName;
+  cricket::DataSendParameters parameters;
+  parameters.codecs.push_back(codec);
+  ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+  cricket::StreamParams stream;
+  stream.add_ssrc(42);
+  ASSERT_TRUE(dmc->AddSendStream(stream));
+
+  cricket::SendDataParams params;
+  params.ssrc = 42;
+  unsigned char data[] = "food";
+  rtc::CopyOnWriteBuffer payload(data, 4);
+  cricket::SendDataResult result;
+
+  // With rtp overhead of 32 bytes, each one of our packets is 36
+  // bytes, or 288 bits. So, a limit of 872bps will allow 3 packets,
+  // but not four.
+  parameters.max_bandwidth_bps = 872;
+  ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+
+  // Still inside the first one-second window: still limited.
+  SetNow(0.9);
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+
+  // New window: budget is available again.
+  SetNow(1.1);
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  SetNow(1.9);
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+
+  SetNow(2.2);
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_TRUE(dmc->SendData(params, payload, &result));
+  EXPECT_FALSE(dmc->SendData(params, payload, &result));
+}
+
+// Walks OnPacketReceived through its gating conditions (not receiving,
+// unknown payload type, unknown SSRC) and finally verifies the payload is
+// delivered with the reserved padding stripped.
+TEST_F(RtpDataMediaChannelTest, ReceiveData) {
+  // PT= 103, SN=2, TS=3, SSRC = 4, data = "abcde"
+  unsigned char data[] = {
+    0x80, 0x67, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x2A,
+    0x00, 0x00, 0x00, 0x00,
+    'a', 'b', 'c', 'd', 'e'
+  };
+  rtc::CopyOnWriteBuffer packet(data, sizeof(data));
+
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  // SetReceived not called.
+  dmc->OnPacketReceived(&packet, rtc::PacketTime());
+  EXPECT_FALSE(HasReceivedData());
+
+  dmc->SetReceive(true);
+
+  // Unknown payload id
+  dmc->OnPacketReceived(&packet, rtc::PacketTime());
+  EXPECT_FALSE(HasReceivedData());
+
+  cricket::DataCodec codec;
+  codec.id = 103;
+  codec.name = cricket::kGoogleRtpDataCodecName;
+  cricket::DataRecvParameters parameters;
+  parameters.codecs.push_back(codec);
+  ASSERT_TRUE(dmc->SetRecvParameters(parameters));
+
+  // Unknown stream
+  dmc->OnPacketReceived(&packet, rtc::PacketTime());
+  EXPECT_FALSE(HasReceivedData());
+
+  cricket::StreamParams stream;
+  stream.add_ssrc(42);
+  ASSERT_TRUE(dmc->AddRecvStream(stream));
+
+  // Finally works!
+  dmc->OnPacketReceived(&packet, rtc::PacketTime());
+  EXPECT_TRUE(HasReceivedData());
+  EXPECT_EQ("abcde", GetReceivedData());
+  EXPECT_EQ(5U, GetReceivedDataLen());
+}
+
+// A truncated packet (shorter than a full RTP header) must be dropped
+// without delivering anything.
+TEST_F(RtpDataMediaChannelTest, InvalidRtpPackets) {
+  unsigned char data[] = {
+    0x80, 0x65, 0x00, 0x02
+  };
+  rtc::CopyOnWriteBuffer packet(data, sizeof(data));
+
+  std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+  // Too short
+  dmc->OnPacketReceived(&packet, rtc::PacketTime());
+  EXPECT_FALSE(HasReceivedData());
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils.cc b/third_party/libwebrtc/webrtc/media/base/rtputils.cc
new file mode 100644
index 0000000000..d0ba1cf72b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils.cc
@@ -0,0 +1,473 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/rtputils.h"
+
+// PacketTimeUpdateParams is defined in asyncpacketsocket.h.
+// TODO(sergeyu): Find more appropriate place for PacketTimeUpdateParams.
+#include "media/base/turnutils.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/messagedigest.h"
+
+namespace cricket {
+
+static const uint8_t kRtpVersion = 2;
+static const size_t kRtpFlagsOffset = 0;
+static const size_t kRtpPayloadTypeOffset = 1;
+static const size_t kRtpSeqNumOffset = 2;
+static const size_t kRtpTimestampOffset = 4;
+static const size_t kRtpSsrcOffset = 8;
+static const size_t kRtcpPayloadTypeOffset = 1;
+static const size_t kRtpExtensionHeaderLen = 4;
+static const size_t kAbsSendTimeExtensionLen = 3;
+static const size_t kOneByteExtensionHeaderLen = 1;
+
+namespace {
+
+// Fake auth tag written by the sender when external authentication is enabled.
+// HMAC in packet will be compared against this value before updating packet
+// with actual HMAC value.
+static const uint8_t kFakeAuthTag[10] = {
+ 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd
+};
+
+void UpdateAbsSendTimeExtensionValue(uint8_t* extension_data,
+ size_t length,
+ uint64_t time_us) {
+ // Absolute send time in RTP streams.
+ //
+ // The absolute send time is signaled to the receiver in-band using the
+ // general mechanism for RTP header extensions [RFC5285]. The payload
+ // of this extension (the transmitted value) is a 24-bit unsigned integer
+ // containing the sender's current time in seconds as a fixed point number
+ // with 18 bits fractional part.
+ //
+ // The form of the absolute send time extension block:
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | absolute send time |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ if (length != kAbsSendTimeExtensionLen) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ // Convert microseconds to a 6.18 fixed point value in seconds.
+ uint32_t send_time = ((time_us << 18) / 1000000) & 0x00FFFFFF;
+ extension_data[0] = static_cast<uint8_t>(send_time >> 16);
+ extension_data[1] = static_cast<uint8_t>(send_time >> 8);
+ extension_data[2] = static_cast<uint8_t>(send_time);
+}
+
+// Assumes |length| is actual packet length + tag length. Updates HMAC at end of
+// the RTP packet.
+void UpdateRtpAuthTag(uint8_t* rtp,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params) {
+ // If there is no key, return.
+ if (packet_time_params.srtp_auth_key.empty()) {
+ return;
+ }
+
+ size_t tag_length = packet_time_params.srtp_auth_tag_len;
+
+ // ROC (rollover counter) is at the beginning of the auth tag.
+ const size_t kRocLength = 4;
+ if (tag_length < kRocLength || tag_length > length) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ uint8_t* auth_tag = rtp + (length - tag_length);
+
+ // We should have a fake HMAC value @ auth_tag.
+ RTC_DCHECK_EQ(0, memcmp(auth_tag, kFakeAuthTag, tag_length));
+
+ // Copy ROC after end of rtp packet.
+ memcpy(auth_tag, &packet_time_params.srtp_packet_index, kRocLength);
+ // Authentication of a RTP packet will have RTP packet + ROC size.
+ size_t auth_required_length = length - tag_length + kRocLength;
+
+ uint8_t output[64];
+ size_t result = rtc::ComputeHmac(
+ rtc::DIGEST_SHA_1, &packet_time_params.srtp_auth_key[0],
+ packet_time_params.srtp_auth_key.size(), rtp,
+ auth_required_length, output, sizeof(output));
+
+ if (result < tag_length) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ // Copy HMAC from output to packet. This is required as auth tag length
+ // may not be equal to the actual HMAC length.
+ memcpy(auth_tag, output, tag_length);
+}
+
+} // namespace
+
+bool GetUint8(const void* data, size_t offset, int* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = *(static_cast<const uint8_t*>(data) + offset);
+ return true;
+}
+
+bool GetUint16(const void* data, size_t offset, int* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = static_cast<int>(
+ rtc::GetBE16(static_cast<const uint8_t*>(data) + offset));
+ return true;
+}
+
+bool GetUint32(const void* data, size_t offset, uint32_t* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = rtc::GetBE32(static_cast<const uint8_t*>(data) + offset);
+ return true;
+}
+
+bool SetUint8(void* data, size_t offset, uint8_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::Set8(data, offset, value);
+ return true;
+}
+
+bool SetUint16(void* data, size_t offset, uint16_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::SetBE16(static_cast<uint8_t*>(data) + offset, value);
+ return true;
+}
+
+bool SetUint32(void* data, size_t offset, uint32_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::SetBE32(static_cast<uint8_t*>(data) + offset, value);
+ return true;
+}
+
+bool GetRtpFlags(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint8(data, kRtpFlagsOffset, value);
+}
+
+bool GetRtpPayloadType(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ if (!GetUint8(data, kRtpPayloadTypeOffset, value)) {
+ return false;
+ }
+ *value &= 0x7F;
+ return true;
+}
+
+bool GetRtpSeqNum(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint16(data, kRtpSeqNumOffset, value);
+}
+
+bool GetRtpTimestamp(const void* data, size_t len, uint32_t* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint32(data, kRtpTimestampOffset, value);
+}
+
+bool GetRtpSsrc(const void* data, size_t len, uint32_t* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint32(data, kRtpSsrcOffset, value);
+}
+
+bool GetRtpHeaderLen(const void* data, size_t len, size_t* value) {
+ if (!data || len < kMinRtpPacketLen || !value) return false;
+ const uint8_t* header = static_cast<const uint8_t*>(data);
+ // Get base header size + length of CSRCs (not counting extension yet).
+ size_t header_size = kMinRtpPacketLen + (header[0] & 0xF) * sizeof(uint32_t);
+ if (len < header_size) return false;
+ // If there's an extension, read and add in the extension size.
+ if (header[0] & 0x10) {
+ if (len < header_size + sizeof(uint32_t))
+ return false;
+ header_size +=
+ ((rtc::GetBE16(header + header_size + 2) + 1) * sizeof(uint32_t));
+ if (len < header_size) return false;
+ }
+ *value = header_size;
+ return true;
+}
+
+bool GetRtpHeader(const void* data, size_t len, RtpHeader* header) {
+ return (GetRtpPayloadType(data, len, &(header->payload_type)) &&
+ GetRtpSeqNum(data, len, &(header->seq_num)) &&
+ GetRtpTimestamp(data, len, &(header->timestamp)) &&
+ GetRtpSsrc(data, len, &(header->ssrc)));
+}
+
+bool GetRtcpType(const void* data, size_t len, int* value) {
+ if (len < kMinRtcpPacketLen) {
+ return false;
+ }
+ return GetUint8(data, kRtcpPayloadTypeOffset, value);
+}
+
+// This method returns the first SSRC of an RTCP packet, except if the packet is SDES.
+// TODO(mallinath) - Fully implement RFC 5506. This standard doesn't restrict
+// to send non-compound packets only to feedback messages.
+bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value) {
+ // Packet should be at least of 8 bytes, to get SSRC from a RTCP packet.
+ if (!data || len < kMinRtcpPacketLen + 4 || !value) return false;
+ int pl_type;
+ if (!GetRtcpType(data, len, &pl_type)) return false;
+ // SDES packet parsing is not supported.
+ if (pl_type == kRtcpTypeSDES) return false;
+ *value = rtc::GetBE32(static_cast<const uint8_t*>(data) + 4);
+ return true;
+}
+
+bool SetRtpSsrc(void* data, size_t len, uint32_t value) {
+ return SetUint32(data, kRtpSsrcOffset, value);
+}
+
+// Assumes version 2, no padding, no extensions, no csrcs.
+bool SetRtpHeader(void* data, size_t len, const RtpHeader& header) {
+ if (!IsValidRtpPayloadType(header.payload_type) ||
+ header.seq_num < 0 || header.seq_num > UINT16_MAX) {
+ return false;
+ }
+ return (SetUint8(data, kRtpFlagsOffset, kRtpVersion << 6) &&
+ SetUint8(data, kRtpPayloadTypeOffset, header.payload_type & 0x7F) &&
+ SetUint16(data, kRtpSeqNumOffset,
+ static_cast<uint16_t>(header.seq_num)) &&
+ SetUint32(data, kRtpTimestampOffset, header.timestamp) &&
+ SetRtpSsrc(data, len, header.ssrc));
+}
+
+bool IsRtpPacket(const void* data, size_t len) {
+ if (len < kMinRtpPacketLen)
+ return false;
+
+ return (static_cast<const uint8_t*>(data)[0] >> 6) == kRtpVersion;
+}
+
+bool IsValidRtpPayloadType(int payload_type) {
+ return payload_type >= 0 && payload_type <= 127;
+}
+
+bool IsValidRtpRtcpPacketSize(bool rtcp, size_t size) {
+ return (rtcp ? size >= kMinRtcpPacketLen : size >= kMinRtpPacketLen) &&
+ size <= kMaxRtpPacketLen;
+}
+
+const char* RtpRtcpStringLiteral(bool rtcp) {
+ return rtcp ? "RTCP" : "RTP";
+}
+
+bool ValidateRtpHeader(const uint8_t* rtp,
+ size_t length,
+ size_t* header_length) {
+ if (header_length) {
+ *header_length = 0;
+ }
+
+ if (length < kMinRtpPacketLen) {
+ return false;
+ }
+
+ size_t cc_count = rtp[0] & 0x0F;
+ size_t header_length_without_extension = kMinRtpPacketLen + 4 * cc_count;
+ if (header_length_without_extension > length) {
+ return false;
+ }
+
+ // If extension bit is not set, we are done with header processing, as input
+ // length is verified above.
+ if (!(rtp[0] & 0x10)) {
+ if (header_length)
+ *header_length = header_length_without_extension;
+
+ return true;
+ }
+
+ rtp += header_length_without_extension;
+
+ if (header_length_without_extension + kRtpExtensionHeaderLen > length) {
+ return false;
+ }
+
+ // Getting extension profile length.
+ // Length is in 32 bit words.
+ uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2);
+ size_t extension_length = extension_length_in_32bits * 4;
+
+ size_t rtp_header_length = extension_length +
+ header_length_without_extension +
+ kRtpExtensionHeaderLen;
+
+ // Verify input length against total header size.
+ if (rtp_header_length > length) {
+ return false;
+ }
+
+ if (header_length) {
+ *header_length = rtp_header_length;
+ }
+ return true;
+}
+
+// ValidateRtpHeader() must be called before this method to make sure, we have
+// a sane rtp packet.
+bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
+ size_t length,
+ int extension_id,
+ uint64_t time_us) {
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |V=2|P|X| CC |M| PT | sequence number |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | timestamp |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | synchronization source (SSRC) identifier |
+ // +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+ // | contributing source (CSRC) identifiers |
+ // | .... |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // Return if extension bit is not set.
+ if (!(rtp[0] & 0x10)) {
+ return true;
+ }
+
+ size_t cc_count = rtp[0] & 0x0F;
+ size_t header_length_without_extension = kMinRtpPacketLen + 4 * cc_count;
+
+ rtp += header_length_without_extension;
+
+ // Getting extension profile ID and length.
+ uint16_t profile_id = rtc::GetBE16(rtp);
+ // Length is in 32 bit words.
+ uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2);
+ size_t extension_length = extension_length_in_32bits * 4;
+
+ rtp += kRtpExtensionHeaderLen; // Moving past extension header.
+
+ bool found = false;
+ // WebRTC is using one byte header extension.
+ // TODO(mallinath) - Handle two byte header extension.
+ if (profile_id == 0xBEDE) { // OneByte extension header
+ // 0
+ // 0 1 2 3 4 5 6 7
+ // +-+-+-+-+-+-+-+-+
+ // | ID |length |
+ // +-+-+-+-+-+-+-+-+
+
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | 0xBE | 0xDE | length=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=0 | data | ID | L=1 | data...
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // ...data | 0 (pad) | 0 (pad) | ID | L=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | data |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ const uint8_t* extension_start = rtp;
+ const uint8_t* extension_end = extension_start + extension_length;
+
+ while (rtp < extension_end) {
+ const int id = (*rtp & 0xF0) >> 4;
+ const size_t length = (*rtp & 0x0F) + 1;
+ if (rtp + kOneByteExtensionHeaderLen + length > extension_end) {
+ return false;
+ }
+ // The 4-bit length is the number minus one of data bytes of this header
+ // extension element following the one-byte header.
+ if (id == extension_id) {
+ UpdateAbsSendTimeExtensionValue(rtp + kOneByteExtensionHeaderLen,
+ length, time_us);
+ found = true;
+ break;
+ }
+ rtp += kOneByteExtensionHeaderLen + length;
+ // Counting padding bytes.
+ while ((rtp < extension_end) && (*rtp == 0)) {
+ ++rtp;
+ }
+ }
+ }
+ return found;
+}
+
+bool ApplyPacketOptions(uint8_t* data,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params,
+ uint64_t time_us) {
+ RTC_DCHECK(data);
+ RTC_DCHECK(length);
+
+ // if there is no valid |rtp_sendtime_extension_id| and |srtp_auth_key| in
+ // PacketOptions, nothing to be updated in this packet.
+ if (packet_time_params.rtp_sendtime_extension_id == -1 &&
+ packet_time_params.srtp_auth_key.empty()) {
+ return true;
+ }
+
+ // If there is a srtp auth key present then the packet must be an RTP packet.
+ // RTP packet may have been wrapped in a TURN Channel Data or TURN send
+ // indication.
+ size_t rtp_start_pos;
+ size_t rtp_length;
+ if (!UnwrapTurnPacket(data, length, &rtp_start_pos, &rtp_length)) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ // Making sure we have a valid RTP packet at the end.
+ if (!IsRtpPacket(data + rtp_start_pos, rtp_length) ||
+ !ValidateRtpHeader(data + rtp_start_pos, rtp_length, nullptr)) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ uint8_t* start = data + rtp_start_pos;
+ // If packet option has non default value (-1) for sendtime extension id,
+ // then we should parse the rtp packet to update the timestamp. Otherwise
+ // just calculate HMAC and update packet with it.
+ if (packet_time_params.rtp_sendtime_extension_id != -1) {
+ UpdateRtpAbsSendTimeExtension(start, rtp_length,
+ packet_time_params.rtp_sendtime_extension_id,
+ time_us);
+ }
+
+ UpdateRtpAuthTag(start, rtp_length, packet_time_params);
+ return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils.h b/third_party/libwebrtc/webrtc/media/base/rtputils.h
new file mode 100644
index 0000000000..0b7205cf8f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_RTPUTILS_H_
+#define MEDIA_BASE_RTPUTILS_H_
+
+#include "rtc_base/byteorder.h"
+
+namespace rtc {
+struct PacketTimeUpdateParams;
+} // namespace rtc
+
+namespace cricket {
+
+const size_t kMinRtpPacketLen = 12;
+const size_t kMaxRtpPacketLen = 2048;
+const size_t kMinRtcpPacketLen = 4;
+
+struct RtpHeader {
+ int payload_type;
+ int seq_num;
+ uint32_t timestamp;
+ uint32_t ssrc;
+};
+
+enum RtcpTypes {
+ kRtcpTypeSR = 200, // Sender report payload type.
+ kRtcpTypeRR = 201, // Receiver report payload type.
+ kRtcpTypeSDES = 202, // SDES payload type.
+ kRtcpTypeBye = 203, // BYE payload type.
+ kRtcpTypeApp = 204, // APP payload type.
+ kRtcpTypeRTPFB = 205, // Transport layer Feedback message payload type.
+ kRtcpTypePSFB = 206, // Payload-specific Feedback message payload type.
+};
+
+bool GetRtpPayloadType(const void* data, size_t len, int* value);
+bool GetRtpSeqNum(const void* data, size_t len, int* value);
+bool GetRtpTimestamp(const void* data, size_t len, uint32_t* value);
+bool GetRtpSsrc(const void* data, size_t len, uint32_t* value);
+bool GetRtpHeaderLen(const void* data, size_t len, size_t* value);
+bool GetRtcpType(const void* data, size_t len, int* value);
+bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value);
+bool GetRtpHeader(const void* data, size_t len, RtpHeader* header);
+
+bool SetRtpSsrc(void* data, size_t len, uint32_t value);
+// Assumes version 2, no padding, no extensions, no csrcs.
+bool SetRtpHeader(void* data, size_t len, const RtpHeader& header);
+
+bool IsRtpPacket(const void* data, size_t len);
+
+// True if |payload_type| is 0-127.
+bool IsValidRtpPayloadType(int payload_type);
+
+// True if |size| is appropriate for the indicated packet type.
+bool IsValidRtpRtcpPacketSize(bool rtcp, size_t size);
+
+// TODO(zstein): Consider using an enum instead of a bool to differentiate
+// between RTP and RTCP.
+// Returns "RTCP" or "RTP" according to |rtcp|.
+const char* RtpRtcpStringLiteral(bool rtcp);
+
+// Verifies that a packet has a valid RTP header.
+bool ValidateRtpHeader(const uint8_t* rtp,
+ size_t length,
+ size_t* header_length);
+
+// Helper method which updates the absolute send time extension if present.
+bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
+ size_t length,
+ int extension_id,
+ uint64_t time_us);
+
+// Applies specified |options| to the packet. It updates the absolute send time
+// extension header if it is present, then updates the HMAC.
+bool ApplyPacketOptions(uint8_t* data,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params,
+ uint64_t time_us);
+
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_RTPUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc b/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc
new file mode 100644
index 0000000000..a71eac7a07
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc
@@ -0,0 +1,353 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "media/base/rtputils.h"
+#include "media/base/fakertp.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
+static const uint8_t kRtpPacketWithMarker[] = {
+ 0x80, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01
+};
+// 3 CSRCs (0x01020304, 0x12345678, 0xAABBCCDD)
+// Extension (0xBEDE, 0x1122334455667788)
+static const uint8_t kRtpPacketWithMarkerAndCsrcAndExtension[] = {
+ 0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
+ 0xBE, 0xDE, 0x00, 0x02, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88
+};
+static const uint8_t kInvalidPacket[] = { 0x80, 0x00 };
+static const uint8_t kInvalidPacketWithCsrc[] = {
+ 0x83, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC
+};
+static const uint8_t kInvalidPacketWithCsrcAndExtension1[] = {
+ 0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
+ 0xBE, 0xDE, 0x00
+};
+static const uint8_t kInvalidPacketWithCsrcAndExtension2[] = {
+ 0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
+ 0xBE, 0xDE, 0x00, 0x02, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77
+};
+
+// PT = 206, FMT = 1, Sender SSRC = 0x1111, Media SSRC = 0x1111
+// No FCI information is needed for PLI.
+static const uint8_t kNonCompoundRtcpPliFeedbackPacket[] = {
+ 0x81, 0xCE, 0x00, 0x0C, 0x00, 0x00, 0x11, 0x11, 0x00, 0x00, 0x11, 0x11
+};
+
+// Packet has only mandatory fixed RTCP header
+// PT = 204, SSRC = 0x1111
+static const uint8_t kNonCompoundRtcpAppPacket[] = {
+ 0x81, 0xCC, 0x00, 0x0C, 0x00, 0x00, 0x11, 0x11
+};
+
+// PT = 202, Source count = 0
+static const uint8_t kNonCompoundRtcpSDESPacket[] = {
+ 0x80, 0xCA, 0x00, 0x00
+};
+
+static uint8_t kFakeTag[4] = { 0xba, 0xdd, 0xba, 0xdd };
+static uint8_t kTestKey[] = "12345678901234567890";
+static uint8_t kTestAstValue[3] = { 0xaa, 0xbb, 0xcc };
+
+// Valid rtp Message with 2 byte header extension.
+static uint8_t kRtpMsgWith2ByteExtnHeader[] = {
+ 0x90, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0xAA, 0xBB, 0xCC, 0XDD, // SSRC
+ 0x10, 0x00, 0x00, 0x01, // 2 Byte header extension
+ 0x01, 0x00, 0x00, 0x00
+};
+
+// RTP packet with single byte extension header of length 4 bytes.
+// Extension id = 3 and length = 3
+static uint8_t kRtpMsgWithAbsSendTimeExtension[] = {
+ 0x90, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0xBE, 0xDE, 0x00, 0x02,
+ 0x22, 0x00, 0x02, 0x1c,
+ 0x32, 0xaa, 0xbb, 0xcc,
+};
+
+// Index of AbsSendTimeExtn data in message |kRtpMsgWithAbsSendTimeExtension|.
+static const int kAstIndexInRtpMsg = 21;
+
+TEST(RtpUtilsTest, GetRtp) {
+ EXPECT_TRUE(IsRtpPacket(kPcmuFrame, sizeof(kPcmuFrame)));
+
+ int pt;
+ EXPECT_TRUE(GetRtpPayloadType(kPcmuFrame, sizeof(kPcmuFrame), &pt));
+ EXPECT_EQ(0, pt);
+ EXPECT_TRUE(GetRtpPayloadType(kRtpPacketWithMarker,
+ sizeof(kRtpPacketWithMarker), &pt));
+ EXPECT_EQ(0, pt);
+
+ int seq_num;
+ EXPECT_TRUE(GetRtpSeqNum(kPcmuFrame, sizeof(kPcmuFrame), &seq_num));
+ EXPECT_EQ(1, seq_num);
+
+ uint32_t ts;
+ EXPECT_TRUE(GetRtpTimestamp(kPcmuFrame, sizeof(kPcmuFrame), &ts));
+ EXPECT_EQ(0u, ts);
+
+ uint32_t ssrc;
+ EXPECT_TRUE(GetRtpSsrc(kPcmuFrame, sizeof(kPcmuFrame), &ssrc));
+ EXPECT_EQ(1u, ssrc);
+
+ RtpHeader header;
+ EXPECT_TRUE(GetRtpHeader(kPcmuFrame, sizeof(kPcmuFrame), &header));
+ EXPECT_EQ(0, header.payload_type);
+ EXPECT_EQ(1, header.seq_num);
+ EXPECT_EQ(0u, header.timestamp);
+ EXPECT_EQ(1u, header.ssrc);
+
+ EXPECT_FALSE(GetRtpPayloadType(kInvalidPacket, sizeof(kInvalidPacket), &pt));
+ EXPECT_FALSE(GetRtpSeqNum(kInvalidPacket, sizeof(kInvalidPacket), &seq_num));
+ EXPECT_FALSE(GetRtpTimestamp(kInvalidPacket, sizeof(kInvalidPacket), &ts));
+ EXPECT_FALSE(GetRtpSsrc(kInvalidPacket, sizeof(kInvalidPacket), &ssrc));
+}
+
+TEST(RtpUtilsTest, SetRtpHeader) {
+ uint8_t packet[] = {
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+ };
+
+ RtpHeader header = { 9, 1111, 2222u, 3333u };
+ EXPECT_TRUE(SetRtpHeader(packet, sizeof(packet), header));
+
+ // Bits: 10 0 0 0000
+ EXPECT_EQ(128u, packet[0]);
+ size_t len;
+ EXPECT_TRUE(GetRtpHeaderLen(packet, sizeof(packet), &len));
+ EXPECT_EQ(12U, len);
+ EXPECT_TRUE(GetRtpHeader(packet, sizeof(packet), &header));
+ EXPECT_EQ(9, header.payload_type);
+ EXPECT_EQ(1111, header.seq_num);
+ EXPECT_EQ(2222u, header.timestamp);
+ EXPECT_EQ(3333u, header.ssrc);
+}
+
+TEST(RtpUtilsTest, GetRtpHeaderLen) {
+ size_t len;
+ EXPECT_TRUE(GetRtpHeaderLen(kPcmuFrame, sizeof(kPcmuFrame), &len));
+ EXPECT_EQ(12U, len);
+
+ EXPECT_TRUE(GetRtpHeaderLen(kRtpPacketWithMarkerAndCsrcAndExtension,
+ sizeof(kRtpPacketWithMarkerAndCsrcAndExtension),
+ &len));
+ EXPECT_EQ(sizeof(kRtpPacketWithMarkerAndCsrcAndExtension), len);
+
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacket, sizeof(kInvalidPacket), &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrc,
+ sizeof(kInvalidPacketWithCsrc), &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrcAndExtension1,
+ sizeof(kInvalidPacketWithCsrcAndExtension1),
+ &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrcAndExtension2,
+ sizeof(kInvalidPacketWithCsrcAndExtension2),
+ &len));
+}
+
+TEST(RtpUtilsTest, GetRtcp) {
+ int pt;
+ EXPECT_TRUE(GetRtcpType(kRtcpReport, sizeof(kRtcpReport), &pt));
+ EXPECT_EQ(0xc9, pt);
+
+ EXPECT_FALSE(GetRtcpType(kInvalidPacket, sizeof(kInvalidPacket), &pt));
+
+ uint32_t ssrc;
+ EXPECT_TRUE(GetRtcpSsrc(kNonCompoundRtcpPliFeedbackPacket,
+ sizeof(kNonCompoundRtcpPliFeedbackPacket),
+ &ssrc));
+ EXPECT_TRUE(GetRtcpSsrc(kNonCompoundRtcpAppPacket,
+ sizeof(kNonCompoundRtcpAppPacket),
+ &ssrc));
+ EXPECT_FALSE(GetRtcpSsrc(kNonCompoundRtcpSDESPacket,
+ sizeof(kNonCompoundRtcpSDESPacket),
+ &ssrc));
+}
+
+// Invalid RTP packets.
+TEST(RtpUtilsTest, InvalidRtpHeader) {
+ // Rtp message with invalid length.
+ const uint8_t kRtpMsgWithInvalidLength[] = {
+ 0x94, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0xAA, 0xBB, 0xCC, 0XDD, // SSRC
+ 0xDD, 0xCC, 0xBB, 0xAA, // Only 1 CSRC, but CC count is 4.
+ };
+ EXPECT_FALSE(ValidateRtpHeader(kRtpMsgWithInvalidLength,
+ sizeof(kRtpMsgWithInvalidLength), nullptr));
+
+ // Rtp message with single byte header extension, invalid extension length.
+ const uint8_t kRtpMsgWithInvalidExtnLength[] = {
+ 0x90, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0xBE, 0xDE, 0x0A, 0x00, // Extn length - 0x0A00
+ };
+ EXPECT_FALSE(ValidateRtpHeader(kRtpMsgWithInvalidExtnLength,
+ sizeof(kRtpMsgWithInvalidExtnLength),
+ nullptr));
+}
+
+// Valid RTP packet with a 2byte header extension.
+TEST(RtpUtilsTest, Valid2ByteExtnHdrRtpMessage) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWith2ByteExtnHeader,
+ sizeof(kRtpMsgWith2ByteExtnHeader), nullptr));
+}
+
+// Valid RTP packet which has 1 byte header AbsSendTime extension in it.
+TEST(RtpUtilsTest, ValidRtpPacketWithAbsSendTimeExtension) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithAbsSendTimeExtension,
+ sizeof(kRtpMsgWithAbsSendTimeExtension),
+ nullptr));
+}
+
+// Verify handling of a 2 byte extension header RTP message. Currently these
+// messages are not supported.
+TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionIn2ByteHeaderExtn) {
+ std::vector<uint8_t> data(
+ kRtpMsgWith2ByteExtnHeader,
+ kRtpMsgWith2ByteExtnHeader + sizeof(kRtpMsgWith2ByteExtnHeader));
+ EXPECT_FALSE(UpdateRtpAbsSendTimeExtension(&data[0], data.size(), 3, 0));
+}
+
+// Verify finding an extension ID in the TURN send indication message.
+TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInTurnSendIndication) {
+ // A valid STUN indication message with a valid RTP header in data attribute
+ // payload field and no extension bit set.
+ uint8_t message_without_extension[] = {
+ 0x00, 0x16, 0x00, 0x18, // length of
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7',
+ '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x13, 0x00, 0x0C, // Data attribute.
+ 0x80, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(
+ message_without_extension, sizeof(message_without_extension), 3, 0));
+
+ // A valid STUN indication message with a valid RTP header and a extension
+ // header.
+ uint8_t message[] = {
+ 0x00, 0x16, 0x00, 0x24, // length of
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7',
+ '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x13, 0x00, 0x18, // Data attribute.
+ 0x90, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xBE, 0xDE,
+ 0x00, 0x02, 0x22, 0xaa, 0xbb, 0xcc, 0x32, 0xaa, 0xbb, 0xcc,
+ };
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(message, sizeof(message), 3, 0));
+}
+
+// Test without any packet options variables set. This method should return
+// without HMAC value in the packet.
+TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) {
+ rtc::PacketTimeUpdateParams packet_time_params;
+ std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
+ kRtpMsgWithAbsSendTimeExtension +
+ sizeof(kRtpMsgWithAbsSendTimeExtension));
+ rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
+ EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
+ packet_time_params, 0));
+
+  // Making sure the HMAC wasn't updated.
+ EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
+ kFakeTag, 4));
+
+  // Verify the AbsoluteSendTime extension field wasn't modified.
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ sizeof(kTestAstValue)));
+}
+
+// Verify the HMAC is updated when packet option parameters are set.
+TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParams) {
+ rtc::PacketTimeUpdateParams packet_time_params;
+ packet_time_params.srtp_auth_key.assign(kTestKey,
+ kTestKey + sizeof(kTestKey));
+ packet_time_params.srtp_auth_tag_len = 4;
+
+ std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
+ kRtpMsgWithAbsSendTimeExtension +
+ sizeof(kRtpMsgWithAbsSendTimeExtension));
+ rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
+ EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
+ packet_time_params, 0));
+
+ uint8_t kExpectedTag[] = {0xc1, 0x7a, 0x8c, 0xa0};
+ EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
+
+  // Verify the AbsoluteSendTime extension field is not modified.
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ sizeof(kTestAstValue)));
+}
+
+// Verify finding an extension ID in a raw rtp message.
+TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
+ kRtpMsgWithAbsSendTimeExtension +
+ sizeof(kRtpMsgWithAbsSendTimeExtension));
+
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
+ 3, 51183266));
+
+ // Verify that the timestamp was updated.
+ const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ sizeof(kExpectedTimestamp)));
+}
+
+// Verify we update both AbsSendTime extension header and HMAC.
+TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParamsAndAbsSendTime) {
+ rtc::PacketTimeUpdateParams packet_time_params;
+ packet_time_params.srtp_auth_key.assign(kTestKey,
+ kTestKey + sizeof(kTestKey));
+ packet_time_params.srtp_auth_tag_len = 4;
+ packet_time_params.rtp_sendtime_extension_id = 3;
+ // 3 is also present in the test message.
+
+ std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
+ kRtpMsgWithAbsSendTimeExtension +
+ sizeof(kRtpMsgWithAbsSendTimeExtension));
+ rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
+ EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
+ packet_time_params, 51183266));
+
+ const uint8_t kExpectedTag[] = {0x81, 0xd1, 0x2c, 0x0e};
+ EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
+
+ // Verify that the timestamp was updated.
+ const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ sizeof(kExpectedTimestamp)));
+}
+
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams.cc b/third_party/libwebrtc/webrtc/media/base/streamparams.cc
new file mode 100644
index 0000000000..fd61a87ffd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams.cc
@@ -0,0 +1,268 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/streamparams.h"
+
+#include <list>
+#include <sstream>
+
+namespace cricket {
+namespace {
+// Appends |stream| to |streams|.
+// NOTE: There is no check here for duplicate streams, so check before
+// adding.
+void AddStream(std::vector<StreamParams>* streams, const StreamParams& stream) {
+  streams->push_back(stream);
+}
+}  // namespace
+
+// SSRC-group semantics labels (see RFC 5576, "Source-Specific Media
+// Attributes in SDP"). FID pairs a primary SSRC with its retransmission
+// (RTX) SSRC; FEC/FEC-FR pair it with FEC protection; SIM groups
+// simulcast layers (SIM is a WebRTC-specific grouping).
+const char kFecSsrcGroupSemantics[] = "FEC";
+const char kFecFrSsrcGroupSemantics[] = "FEC-FR";
+const char kFidSsrcGroupSemantics[] = "FID";
+const char kSimSsrcGroupSemantics[] = "SIM";
+
+// Looks up the stream matching |selector| in |streams|. On success copies
+// it into |*stream_out| (if non-null) and returns true.
+bool GetStream(const StreamParamsVec& streams,
+               const StreamSelector& selector,
+               StreamParams* stream_out) {
+  const StreamParams* found = GetStream(streams, selector);
+  if (found && stream_out)
+    *stream_out = *found;
+  return found != nullptr;
+}
+
+// MediaStreams: thin convenience methods that forward to the
+// GetStream()/AddStream()/RemoveStream() helpers for each per-media-type
+// stream list.
+
+bool MediaStreams::GetAudioStream(
+    const StreamSelector& selector, StreamParams* stream) {
+  return GetStream(audio_, selector, stream);
+}
+
+bool MediaStreams::GetVideoStream(
+    const StreamSelector& selector, StreamParams* stream) {
+  return GetStream(video_, selector, stream);
+}
+
+bool MediaStreams::GetDataStream(
+    const StreamSelector& selector, StreamParams* stream) {
+  return GetStream(data_, selector, stream);
+}
+
+// Replaces all three stream lists with copies of |streams|'s lists.
+void MediaStreams::CopyFrom(const MediaStreams& streams) {
+  audio_ = streams.audio_;
+  video_ = streams.video_;
+  data_ = streams.data_;
+}
+
+void MediaStreams::AddAudioStream(const StreamParams& stream) {
+  AddStream(&audio_, stream);
+}
+
+void MediaStreams::AddVideoStream(const StreamParams& stream) {
+  AddStream(&video_, stream);
+}
+
+void MediaStreams::AddDataStream(const StreamParams& stream) {
+  AddStream(&data_, stream);
+}
+
+bool MediaStreams::RemoveAudioStream(
+    const StreamSelector& selector) {
+  return RemoveStream(&audio_, selector);
+}
+
+bool MediaStreams::RemoveVideoStream(
+    const StreamSelector& selector) {
+  return RemoveStream(&video_, selector);
+}
+
+bool MediaStreams::RemoveDataStream(
+    const StreamSelector& selector) {
+  return RemoveStream(&data_, selector);
+}
+
+// Formats |ssrcs| as "ssrcs:[a,b,c]" for the ToString() debug output.
+static std::string SsrcsToString(const std::vector<uint32_t>& ssrcs) {
+  std::ostringstream ost;
+  ost << "ssrcs:[";
+  for (std::vector<uint32_t>::const_iterator it = ssrcs.begin();
+       it != ssrcs.end(); ++it) {
+    if (it != ssrcs.begin()) {
+      ost << ",";
+    }
+    ost << *it;
+  }
+  ost << "]";
+  return ost.str();
+}
+
+// True iff this group carries exactly |semantics_in| (case-sensitive) and
+// has at least one SSRC; empty groups are treated as absent.
+bool SsrcGroup::has_semantics(const std::string& semantics_in) const {
+  return (semantics == semantics_in && ssrcs.size() > 0);
+}
+
+// Debug representation, e.g. "{semantics:FID;ssrcs:[1,2]}".
+std::string SsrcGroup::ToString() const {
+  std::ostringstream ost;
+  ost << "{";
+  ost << "semantics:" << semantics << ";";
+  ost << SsrcsToString(ssrcs);
+  ost << "}";
+  return ost.str();
+}
+
+// Debug representation: emits only the fields that are set, plus the SSRC
+// list and SSRC groups, e.g. "{ssrcs:[1,2];ssrc_groups:{...};}".
+std::string StreamParams::ToString() const {
+  std::ostringstream ost;
+  ost << "{";
+  if (!groupid.empty()) {
+    ost << "groupid:" << groupid << ";";
+  }
+  if (!id.empty()) {
+    ost << "id:" << id << ";";
+  }
+  ost << SsrcsToString(ssrcs) << ";";
+  ost << "ssrc_groups:";
+  for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
+       it != ssrc_groups.end(); ++it) {
+    if (it != ssrc_groups.begin()) {
+      ost << ",";
+    }
+    ost << it->ToString();
+  }
+  ost << ";";
+  if (!type.empty()) {
+    ost << "type:" << type << ";";
+  }
+  if (!display.empty()) {
+    ost << "display:" << display << ";";
+  }
+  if (!cname.empty()) {
+    ost << "cname:" << cname << ";";
+  }
+  if (!sync_label.empty()) {
+    ost << "sync_label:" << sync_label;
+  }
+  ost << "}";
+  return ost.str();
+}
+// Appends the SIM-group SSRCs to |ssrcs| if a SIM group exists, otherwise
+// just the first SSRC (0 when the stream has no SSRCs at all).
+void StreamParams::GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const {
+  const SsrcGroup* sim_group = get_ssrc_group(kSimSsrcGroupSemantics);
+  if (sim_group == NULL) {
+    ssrcs->push_back(first_ssrc());
+  } else {
+    for (size_t i = 0; i < sim_group->ssrcs.size(); ++i) {
+      ssrcs->push_back(sim_group->ssrcs[i]);
+    }
+  }
+}
+
+// Appends the FID (RTX) SSRC of each primary SSRC that has one; the output
+// may therefore be shorter than |primary_ssrcs|.
+void StreamParams::GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs,
+                               std::vector<uint32_t>* fid_ssrcs) const {
+  for (size_t i = 0; i < primary_ssrcs.size(); ++i) {
+    uint32_t fid_ssrc;
+    if (GetFidSsrc(primary_ssrcs[i], &fid_ssrc)) {
+      fid_ssrcs->push_back(fid_ssrc);
+    }
+  }
+}
+
+// Registers |secondary_ssrc| as the |semantics| partner of |primary_ssrc|:
+// appends it to |ssrcs| and records a 2-element SSRC group
+// {primary, secondary}. Fails (returns false) if |primary_ssrc| is not
+// already part of this stream.
+bool StreamParams::AddSecondarySsrc(const std::string& semantics,
+                                    uint32_t primary_ssrc,
+                                    uint32_t secondary_ssrc) {
+  if (!has_ssrc(primary_ssrc)) {
+    return false;
+  }
+
+  ssrcs.push_back(secondary_ssrc);
+  std::vector<uint32_t> ssrc_vector;
+  ssrc_vector.push_back(primary_ssrc);
+  ssrc_vector.push_back(secondary_ssrc);
+  SsrcGroup ssrc_group = SsrcGroup(semantics, ssrc_vector);
+  ssrc_groups.push_back(ssrc_group);
+  return true;
+}
+
+// Looks up the |semantics| partner of |primary_ssrc|. The size() >= 2
+// check guards against malformed one-element groups.
+bool StreamParams::GetSecondarySsrc(const std::string& semantics,
+                                    uint32_t primary_ssrc,
+                                    uint32_t* secondary_ssrc) const {
+  for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
+       it != ssrc_groups.end(); ++it) {
+    if (it->has_semantics(semantics) &&
+        it->ssrcs.size() >= 2 &&
+        it->ssrcs[0] == primary_ssrc) {
+      *secondary_ssrc = it->ssrcs[1];
+      return true;
+    }
+  }
+  return false;
+}
+
+// True if |sp| describes a single primary stream, optionally with an RTX
+// (FID) and/or FlexFEC (FEC-FR) secondary SSRC attached to that primary.
+bool IsOneSsrcStream(const StreamParams& sp) {
+  if (sp.ssrcs.size() == 1 && sp.ssrc_groups.empty()) {
+    return true;
+  }
+  const SsrcGroup* fid_group = sp.get_ssrc_group(kFidSsrcGroupSemantics);
+  const SsrcGroup* fecfr_group = sp.get_ssrc_group(kFecFrSsrcGroupSemantics);
+  // Two SSRCs: primary plus either its RTX or its FlexFEC partner.
+  if (sp.ssrcs.size() == 2) {
+    if (fid_group != nullptr && sp.ssrcs == fid_group->ssrcs) {
+      return true;
+    }
+    if (fecfr_group != nullptr && sp.ssrcs == fecfr_group->ssrcs) {
+      return true;
+    }
+  }
+  // Three SSRCs: primary plus both RTX and FlexFEC, in either order.
+  if (sp.ssrcs.size() == 3) {
+    if (fid_group == nullptr || fecfr_group == nullptr) {
+      return false;
+    }
+    if (sp.ssrcs[0] != fid_group->ssrcs[0] ||
+        sp.ssrcs[0] != fecfr_group->ssrcs[0]) {
+      return false;
+    }
+    // NOTE(review): the ssrcs[1] accesses below assume both groups hold at
+    // least two SSRCs; has_semantics() only guarantees non-empty — confirm
+    // malformed one-element groups cannot reach this path.
+    // We do not check for FlexFEC over RTX,
+    // as this combination is not supported.
+    if (sp.ssrcs[1] == fid_group->ssrcs[1] &&
+        sp.ssrcs[2] == fecfr_group->ssrcs[1]) {
+      return true;
+    }
+    if (sp.ssrcs[1] == fecfr_group->ssrcs[1] &&
+        sp.ssrcs[2] == fid_group->ssrcs[1]) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Erases the first occurrence of |value| from |ssrcs|, if any.
+static void RemoveFirst(std::list<uint32_t>* ssrcs, uint32_t value) {
+  std::list<uint32_t>::iterator it =
+      std::find(ssrcs->begin(), ssrcs->end(), value);
+  if (it != ssrcs->end()) {
+    ssrcs->erase(it);
+  }
+}
+
+// True if |sp| describes a simulcast stream: a SIM group with at least two
+// SSRCs whose members (plus any FID/RTX partners) account for every SSRC
+// in |sp|.
+bool IsSimulcastStream(const StreamParams& sp) {
+  const SsrcGroup* const sg = sp.get_ssrc_group(kSimSsrcGroupSemantics);
+  if (sg == NULL || sg->ssrcs.size() < 2) {
+    return false;
+  }
+  // Start with all StreamParams SSRCs. Remove simulcast SSRCs (from sg) and
+  // RTX SSRCs. If we still have SSRCs left, we don't know what they're for.
+  // Also we remove first-found SSRCs only. So duplicates should lead to errors.
+  std::list<uint32_t> sp_ssrcs(sp.ssrcs.begin(), sp.ssrcs.end());
+  for (size_t i = 0; i < sg->ssrcs.size(); ++i) {
+    RemoveFirst(&sp_ssrcs, sg->ssrcs[i]);
+  }
+  for (size_t i = 0; i < sp.ssrc_groups.size(); ++i) {
+    const SsrcGroup& group = sp.ssrc_groups[i];
+    if (group.semantics.compare(kFidSsrcGroupSemantics) != 0 ||
+        group.ssrcs.size() != 2) {
+      continue;
+    }
+    RemoveFirst(&sp_ssrcs, group.ssrcs[1]);
+  }
+  // If there's SSRCs left that we don't know how to handle, we bail out.
+  return sp_ssrcs.size() == 0;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams.h b/third_party/libwebrtc/webrtc/media/base/streamparams.h
new file mode 100644
index 0000000000..1b2ebfa871
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams.h
@@ -0,0 +1,332 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains structures for describing SSRCs from a media source such
+// as a MediaStreamTrack when it is sent across an RTP session. Multiple media
+// sources may be sent across the same RTP session, each of them will be
+// described by one StreamParams object
+// SsrcGroup is used to describe the relationship between the SSRCs that
+// are used for this media source.
+// E.x: Consider a source that is sent as 3 simulcast streams
+// Let the simulcast elements have SSRC 10, 20, 30.
+// Let each simulcast element use FEC and let the protection packets have
+// SSRC 11,21,31.
+// To describe this 4 SsrcGroups are needed,
+// StreamParams would then contain ssrc = {10,11,20,21,30,31} and
+// ssrc_groups = {{SIM,{10,20,30}, {FEC,{10,11}, {FEC, {20,21}, {FEC {30,31}}}
+// Please see RFC 5576.
+
+#ifndef MEDIA_BASE_STREAMPARAMS_H_
+#define MEDIA_BASE_STREAMPARAMS_H_
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "rtc_base/constructormagic.h"
+
+namespace cricket {
+
+extern const char kFecSsrcGroupSemantics[];
+extern const char kFecFrSsrcGroupSemantics[];
+extern const char kFidSsrcGroupSemantics[];
+extern const char kSimSsrcGroupSemantics[];
+
+// A set of SSRCs related by a semantic (see the file comment above and
+// RFC 5576), e.g. a primary SSRC paired with its RTX SSRC under "FID".
+struct SsrcGroup {
+  SsrcGroup(const std::string& usage, const std::vector<uint32_t>& ssrcs)
+      : semantics(usage), ssrcs(ssrcs) {}
+
+  bool operator==(const SsrcGroup& other) const {
+    return (semantics == other.semantics && ssrcs == other.ssrcs);
+  }
+  bool operator!=(const SsrcGroup &other) const {
+    return !(*this == other);
+  }
+
+  // True iff this group carries |semantics| and has at least one SSRC.
+  bool has_semantics(const std::string& semantics) const;
+
+  // Debug representation, e.g. "{semantics:FID;ssrcs:[1,2]}".
+  std::string ToString() const;
+
+  std::string semantics;  // e.g. FID, FEC, SIM.
+  std::vector<uint32_t> ssrcs;  // SSRCs of this type.
+};
+
+// Describes one media source sent over an RTP session: its identifiers,
+// all SSRCs it uses, and the semantic groups relating those SSRCs. See the
+// file comment above for an example.
+struct StreamParams {
+  // Creates a stream with a single SSRC and no groups.
+  static StreamParams CreateLegacy(uint32_t ssrc) {
+    StreamParams stream;
+    stream.ssrcs.push_back(ssrc);
+    return stream;
+  }
+
+  bool operator==(const StreamParams& other) const {
+    return (groupid == other.groupid &&
+            id == other.id &&
+            ssrcs == other.ssrcs &&
+            ssrc_groups == other.ssrc_groups &&
+            type == other.type &&
+            display == other.display &&
+            cname == other.cname &&
+            sync_label == other.sync_label);
+  }
+  bool operator!=(const StreamParams &other) const {
+    return !(*this == other);
+  }
+
+  // Returns the first SSRC, or 0 if the stream has none.
+  uint32_t first_ssrc() const {
+    if (ssrcs.empty()) {
+      return 0;
+    }
+
+    return ssrcs[0];
+  }
+  bool has_ssrcs() const {
+    return !ssrcs.empty();
+  }
+  bool has_ssrc(uint32_t ssrc) const {
+    return std::find(ssrcs.begin(), ssrcs.end(), ssrc) != ssrcs.end();
+  }
+  void add_ssrc(uint32_t ssrc) { ssrcs.push_back(ssrc); }
+  bool has_ssrc_groups() const {
+    return !ssrc_groups.empty();
+  }
+  bool has_ssrc_group(const std::string& semantics) const {
+    return (get_ssrc_group(semantics) != NULL);
+  }
+  // Returns the first non-empty group with |semantics|, or NULL.
+  const SsrcGroup* get_ssrc_group(const std::string& semantics) const {
+    for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
+         it != ssrc_groups.end(); ++it) {
+      if (it->has_semantics(semantics)) {
+        return &(*it);
+      }
+    }
+    return NULL;
+  }
+
+  // Convenience function to add an FID ssrc for a primary_ssrc
+  // that's already been added.
+  bool AddFidSsrc(uint32_t primary_ssrc, uint32_t fid_ssrc) {
+    return AddSecondarySsrc(kFidSsrcGroupSemantics, primary_ssrc, fid_ssrc);
+  }
+
+  // Convenience function to lookup the FID ssrc for a primary_ssrc.
+  // Returns false if primary_ssrc not found or FID not defined for it.
+  bool GetFidSsrc(uint32_t primary_ssrc, uint32_t* fid_ssrc) const {
+    return GetSecondarySsrc(kFidSsrcGroupSemantics, primary_ssrc, fid_ssrc);
+  }
+
+  // Convenience function to add an FEC-FR ssrc for a primary_ssrc
+  // that's already been added.
+  bool AddFecFrSsrc(uint32_t primary_ssrc, uint32_t fecfr_ssrc) {
+    return AddSecondarySsrc(kFecFrSsrcGroupSemantics, primary_ssrc, fecfr_ssrc);
+  }
+
+  // Convenience function to lookup the FEC-FR ssrc for a primary_ssrc.
+  // Returns false if primary_ssrc not found or FEC-FR not defined for it.
+  bool GetFecFrSsrc(uint32_t primary_ssrc, uint32_t* fecfr_ssrc) const {
+    return GetSecondarySsrc(kFecFrSsrcGroupSemantics, primary_ssrc, fecfr_ssrc);
+  }
+
+  // Convenience to get all the SIM SSRCs if there are SIM ssrcs, or
+  // the first SSRC otherwise.
+  void GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const;
+
+  // Convenience to get all the FID SSRCs for the given primary ssrcs.
+  // If a given primary SSRC does not have a FID SSRC, the list of FID
+  // SSRCS will be smaller than the list of primary SSRCs.
+  void GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs,
+                   std::vector<uint32_t>* fid_ssrcs) const;
+
+  std::string ToString() const;
+
+  // Resource of the MUC jid of the participant of with this stream.
+  // For 1:1 calls, should be left empty (which means remote streams
+  // and local streams should not be mixed together).
+  std::string groupid;
+  // Unique per-groupid, not across all groupids
+  std::string id;
+  std::vector<uint32_t> ssrcs;  // All SSRCs for this source
+  std::vector<SsrcGroup> ssrc_groups;  // e.g. FID, FEC, SIM
+  // Examples: "camera", "screencast"
+  std::string type;
+  // Friendly name describing stream
+  std::string display;
+  std::string cname;  // RTCP CNAME
+  std::string sync_label;  // Friendly name of cname.
+
+ private:
+  // Shared implementation for the Add/Get{Fid,FecFr}Ssrc helpers above.
+  bool AddSecondarySsrc(const std::string& semantics,
+                        uint32_t primary_ssrc,
+                        uint32_t secondary_ssrc);
+  bool GetSecondarySsrc(const std::string& semantics,
+                        uint32_t primary_ssrc,
+                        uint32_t* secondary_ssrc) const;
+};
+
+// A Stream can be selected by either groupid+id or ssrc.
+// An ssrc of 0 means "match by ids"; otherwise the ids are ignored.
+struct StreamSelector {
+  explicit StreamSelector(uint32_t ssrc) : ssrc(ssrc) {}
+
+  StreamSelector(const std::string& groupid,
+                 const std::string& streamid) :
+      ssrc(0),
+      groupid(groupid),
+      streamid(streamid) {
+  }
+
+  // True if |stream| is the one this selector designates.
+  bool Matches(const StreamParams& stream) const {
+    if (ssrc == 0) {
+      return stream.groupid == groupid && stream.id == streamid;
+    } else {
+      return stream.has_ssrc(ssrc);
+    }
+  }
+
+  uint32_t ssrc;
+  std::string groupid;
+  std::string streamid;
+};
+
+typedef std::vector<StreamParams> StreamParamsVec;
+
+// A collection of audio and video and data streams. Most of the
+// methods are merely for convenience. Many of these methods are keyed
+// by ssrc, which is the source identifier in the RTP spec
+// (http://tools.ietf.org/html/rfc3550).
+// TODO(pthatcher): Add basic unit test for these.
+// See https://code.google.com/p/webrtc/issues/detail?id=4107
+struct MediaStreams {
+ public:
+  MediaStreams() {}
+  // Replaces this object's stream lists with copies of |sources|'s lists.
+  void CopyFrom(const MediaStreams& sources);
+
+  bool empty() const {
+    return audio_.empty() && video_.empty() && data_.empty();
+  }
+
+  std::vector<StreamParams>* mutable_audio() { return &audio_; }
+  std::vector<StreamParams>* mutable_video() { return &video_; }
+  std::vector<StreamParams>* mutable_data() { return &data_; }
+  const std::vector<StreamParams>& audio() const { return audio_; }
+  const std::vector<StreamParams>& video() const { return video_; }
+  const std::vector<StreamParams>& data() const { return data_; }
+
+  // Gets a stream, returning true if found.
+  bool GetAudioStream(
+      const StreamSelector& selector, StreamParams* stream);
+  bool GetVideoStream(
+      const StreamSelector& selector, StreamParams* stream);
+  bool GetDataStream(
+      const StreamSelector& selector, StreamParams* stream);
+  // Adds a stream.
+  void AddAudioStream(const StreamParams& stream);
+  void AddVideoStream(const StreamParams& stream);
+  void AddDataStream(const StreamParams& stream);
+  // Removes a stream, returning true if found and removed.
+  bool RemoveAudioStream(const StreamSelector& selector);
+  bool RemoveVideoStream(const StreamSelector& selector);
+  bool RemoveDataStream(const StreamSelector& selector);
+
+ private:
+  std::vector<StreamParams> audio_;
+  std::vector<StreamParams> video_;
+  std::vector<StreamParams> data_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(MediaStreams);
+};
+
+// Returns a pointer to the first stream satisfying |condition|, or nullptr.
+template <class Condition>
+const StreamParams* GetStream(const StreamParamsVec& streams,
+                              Condition condition) {
+  StreamParamsVec::const_iterator found =
+      std::find_if(streams.begin(), streams.end(), condition);
+  return found == streams.end() ? nullptr : &(*found);
+}
+
+// Mutable overload of the above.
+template <class Condition>
+StreamParams* GetStream(StreamParamsVec& streams, Condition condition) {
+  StreamParamsVec::iterator found =
+      std::find_if(streams.begin(), streams.end(), condition);
+  return found == streams.end() ? nullptr : &(*found);
+}
+
+// Finds the first stream containing |ssrc|, or nullptr.
+inline const StreamParams* GetStreamBySsrc(const StreamParamsVec& streams,
+                                           uint32_t ssrc) {
+  return GetStream(streams,
+      [&ssrc](const StreamParams& sp) { return sp.has_ssrc(ssrc); });
+}
+
+// Finds the stream identified by |groupid| and |id|, or nullptr.
+inline const StreamParams* GetStreamByIds(const StreamParamsVec& streams,
+                                          const std::string& groupid,
+                                          const std::string& id) {
+  return GetStream(streams, [&groupid, &id](const StreamParams& sp) {
+    return sp.groupid == groupid && sp.id == id;
+  });
+}
+
+// Mutable overload of the above.
+inline StreamParams* GetStreamByIds(StreamParamsVec& streams,
+                                    const std::string& groupid,
+                                    const std::string& id) {
+  return GetStream(streams,
+      [&groupid, &id](const StreamParams& sp) {
+        return sp.groupid == groupid && sp.id == id;
+      });
+}
+
+// Finds the first stream matching |selector|, or nullptr.
+inline const StreamParams* GetStream(const StreamParamsVec& streams,
+                                     const StreamSelector& selector) {
+  return GetStream(streams,
+      [&selector](const StreamParams& sp) { return selector.Matches(sp); });
+}
+
+// Erases all streams satisfying |condition|; returns true if any were
+// removed (erase-remove idiom).
+template <class Condition>
+bool RemoveStream(StreamParamsVec* streams, Condition condition) {
+  auto iter(std::remove_if(streams->begin(), streams->end(), condition));
+  if (iter == streams->end())
+    return false;
+  streams->erase(iter, streams->end());
+  return true;
+}
+
+// Removes the stream from streams. Returns true if a stream is
+// found and removed.
+inline bool RemoveStream(StreamParamsVec* streams,
+                         const StreamSelector& selector) {
+  return RemoveStream(streams,
+      [&selector](const StreamParams& sp) { return selector.Matches(sp); });
+}
+inline bool RemoveStreamBySsrc(StreamParamsVec* streams, uint32_t ssrc) {
+  return RemoveStream(streams,
+      [&ssrc](const StreamParams& sp) { return sp.has_ssrc(ssrc); });
+}
+inline bool RemoveStreamByIds(StreamParamsVec* streams,
+                              const std::string& groupid,
+                              const std::string& id) {
+  return RemoveStream(streams,
+      [&groupid, &id](const StreamParams& sp) {
+        return sp.groupid == groupid && sp.id == id;
+      });
+}
+
+// Checks if |sp| defines parameters for a single primary stream. There may
+// be an RTX stream or a FlexFEC stream (or both) associated with the primary
+// stream. Leaving as non-static so we can test this function.
+bool IsOneSsrcStream(const StreamParams& sp);
+
+// Checks if |sp| defines parameters for one Simulcast stream. There may be RTX
+// streams associated with the simulcast streams. Leaving as non-static so we
+// can test this function.
+bool IsSimulcastStream(const StreamParams& sp);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_STREAMPARAMS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc b/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc
new file mode 100644
index 0000000000..6e934ae7d6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/streamparams.h"
+#include "media/base/testutils.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/gunit.h"
+
+// Shared SSRC fixtures for the tests below.
+static const uint32_t kSsrcs1[] = {1};
+static const uint32_t kSsrcs2[] = {1, 2};
+static const uint32_t kSsrcs3[] = {1, 2, 3};
+static const uint32_t kRtxSsrcs3[] = {4, 5, 6};
+
+// Builds a StreamParams whose ssrcs are |ssrcs_in| and which carries one
+// SSRC group with the given |semantics| over those same SSRCs.
+static cricket::StreamParams CreateStreamParamsWithSsrcGroup(
+    const std::string& semantics,
+    const uint32_t ssrcs_in[],
+    size_t len) {
+  cricket::StreamParams stream;
+  std::vector<uint32_t> ssrcs(ssrcs_in, ssrcs_in + len);
+  cricket::SsrcGroup sg(semantics, ssrcs);
+  stream.ssrcs = ssrcs;
+  stream.ssrc_groups.push_back(sg);
+  return stream;
+}
+
+// Pairwise ==/!= over groups differing in semantics (case-sensitive) or
+// SSRC list: each group equals only itself.
+TEST(SsrcGroup, EqualNotEqual) {
+  cricket::SsrcGroup ssrc_groups[] = {
+      cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)),
+      cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)),
+      cricket::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)),
+      cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)),
+  };
+
+  for (size_t i = 0; i < arraysize(ssrc_groups); ++i) {
+    for (size_t j = 0; j < arraysize(ssrc_groups); ++j) {
+      EXPECT_EQ((ssrc_groups[i] == ssrc_groups[j]), (i == j));
+      EXPECT_EQ((ssrc_groups[i] != ssrc_groups[j]), (i != j));
+    }
+  }
+}
+
+// Semantics matching must be exact (case-sensitive).
+TEST(SsrcGroup, HasSemantics) {
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
+  EXPECT_TRUE(sg1.has_semantics("ABC"));
+
+  cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1));
+  EXPECT_FALSE(sg2.has_semantics("ABC"));
+
+  cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1));
+  EXPECT_FALSE(sg3.has_semantics("ABC"));
+}
+
+// Pins the exact debug-string format.
+TEST(SsrcGroup, ToString) {
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
+  EXPECT_STREQ("{semantics:ABC;ssrcs:[1]}", sg1.ToString().c_str());
+}
+
+// CreateLegacy() yields a single-SSRC stream with no groups.
+TEST(StreamParams, CreateLegacy) {
+  const uint32_t ssrc = 7;
+  cricket::StreamParams one_sp = cricket::StreamParams::CreateLegacy(ssrc);
+  EXPECT_EQ(1U, one_sp.ssrcs.size());
+  EXPECT_EQ(ssrc, one_sp.first_ssrc());
+  EXPECT_TRUE(one_sp.has_ssrcs());
+  EXPECT_TRUE(one_sp.has_ssrc(ssrc));
+  EXPECT_FALSE(one_sp.has_ssrc(ssrc+1));
+  EXPECT_FALSE(one_sp.has_ssrc_groups());
+  EXPECT_EQ(0U, one_sp.ssrc_groups.size());
+}
+
+// A stream built with one SSRC group exposes it via has_ssrc_group().
+TEST(StreamParams, HasSsrcGroup) {
+  cricket::StreamParams sp =
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+  EXPECT_EQ(2U, sp.ssrcs.size());
+  EXPECT_EQ(kSsrcs2[0], sp.first_ssrc());
+  EXPECT_TRUE(sp.has_ssrcs());
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[0]));
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[1]));
+  EXPECT_TRUE(sp.has_ssrc_group("XYZ"));
+  EXPECT_EQ(1U, sp.ssrc_groups.size());
+  EXPECT_EQ(2U, sp.ssrc_groups[0].ssrcs.size());
+  EXPECT_EQ(kSsrcs2[0], sp.ssrc_groups[0].ssrcs[0]);
+  EXPECT_EQ(kSsrcs2[1], sp.ssrc_groups[0].ssrcs[1]);
+}
+
+// get_ssrc_group() is case-sensitive and returns a pointer into
+// ssrc_groups.
+TEST(StreamParams, GetSsrcGroup) {
+  cricket::StreamParams sp =
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+  EXPECT_EQ(NULL, sp.get_ssrc_group("xyz"));
+  EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ"));
+}
+
+// Pairwise ==/!= over streams differing in SSRCs or group semantics.
+TEST(StreamParams, EqualNotEqual) {
+  cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1);
+  cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2);
+  cricket::StreamParams sg1 =
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, arraysize(kSsrcs1));
+  cricket::StreamParams sg2 =
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, arraysize(kSsrcs2));
+  cricket::StreamParams sg3 =
+      CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, arraysize(kSsrcs2));
+  cricket::StreamParams sg4 =
+      CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, arraysize(kSsrcs2));
+  cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4};
+
+  for (size_t i = 0; i < arraysize(sps); ++i) {
+    for (size_t j = 0; j < arraysize(sps); ++j) {
+      EXPECT_EQ((sps[i] == sps[j]), (i == j));
+      EXPECT_EQ((sps[i] != sps[j]), (i != j));
+    }
+  }
+}
+
+// AddFidSsrc/GetFidSsrc: requires the primary SSRC to already exist, and
+// lookup must tolerate malformed one-element FID groups.
+TEST(StreamParams, FidFunctions) {
+  uint32_t fid_ssrc;
+
+  cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1);
+  EXPECT_FALSE(sp.AddFidSsrc(10, 20));
+  EXPECT_TRUE(sp.AddFidSsrc(1, 2));
+  EXPECT_TRUE(sp.GetFidSsrc(1, &fid_ssrc));
+  EXPECT_EQ(2u, fid_ssrc);
+  EXPECT_FALSE(sp.GetFidSsrc(15, &fid_ssrc));
+
+  sp.add_ssrc(20);
+  EXPECT_TRUE(sp.AddFidSsrc(20, 30));
+  EXPECT_TRUE(sp.GetFidSsrc(20, &fid_ssrc));
+  EXPECT_EQ(30u, fid_ssrc);
+
+  // Manually create SsrcGroup to test bounds-checking
+  // in GetSecondarySsrc. We construct an invalid StreamParams
+  // for this.
+  std::vector<uint32_t> fid_vector;
+  fid_vector.push_back(13);
+  cricket::SsrcGroup invalid_fid_group(cricket::kFidSsrcGroupSemantics,
+                                       fid_vector);
+  cricket::StreamParams sp_invalid;
+  sp_invalid.add_ssrc(13);
+  sp_invalid.ssrc_groups.push_back(invalid_fid_group);
+  EXPECT_FALSE(sp_invalid.GetFidSsrc(13, &fid_ssrc));
+}
+
+// GetPrimarySsrcs/GetFidSsrcs: without a SIM group only the first SSRC is
+// primary; with one, all SIM SSRCs are, and FID partners are collected.
+TEST(StreamParams, GetPrimaryAndFidSsrcs) {
+  cricket::StreamParams sp;
+  sp.ssrcs.push_back(1);
+  sp.ssrcs.push_back(2);
+  sp.ssrcs.push_back(3);
+
+  std::vector<uint32_t> primary_ssrcs;
+  sp.GetPrimarySsrcs(&primary_ssrcs);
+  std::vector<uint32_t> fid_ssrcs;
+  sp.GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+  ASSERT_EQ(1u, primary_ssrcs.size());
+  EXPECT_EQ(1u, primary_ssrcs[0]);
+  ASSERT_EQ(0u, fid_ssrcs.size());
+
+  sp.ssrc_groups.push_back(
+      cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, sp.ssrcs));
+  sp.AddFidSsrc(1, 10);
+  sp.AddFidSsrc(2, 20);
+
+  primary_ssrcs.clear();
+  sp.GetPrimarySsrcs(&primary_ssrcs);
+  fid_ssrcs.clear();
+  sp.GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+  ASSERT_EQ(3u, primary_ssrcs.size());
+  EXPECT_EQ(1u, primary_ssrcs[0]);
+  EXPECT_EQ(2u, primary_ssrcs[1]);
+  EXPECT_EQ(3u, primary_ssrcs[2]);
+  ASSERT_EQ(2u, fid_ssrcs.size());
+  EXPECT_EQ(10u, fid_ssrcs[0]);
+  EXPECT_EQ(20u, fid_ssrcs[1]);
+}
+
+// AddFecFrSsrc/GetFecFrSsrc: mirrors the FID tests for FEC-FR semantics.
+TEST(StreamParams, FecFrFunctions) {
+  uint32_t fecfr_ssrc;
+
+  cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1);
+  EXPECT_FALSE(sp.AddFecFrSsrc(10, 20));
+  EXPECT_TRUE(sp.AddFecFrSsrc(1, 2));
+  EXPECT_TRUE(sp.GetFecFrSsrc(1, &fecfr_ssrc));
+  EXPECT_EQ(2u, fecfr_ssrc);
+  EXPECT_FALSE(sp.GetFecFrSsrc(15, &fecfr_ssrc));
+
+  sp.add_ssrc(20);
+  EXPECT_TRUE(sp.AddFecFrSsrc(20, 30));
+  EXPECT_TRUE(sp.GetFecFrSsrc(20, &fecfr_ssrc));
+  EXPECT_EQ(30u, fecfr_ssrc);
+
+  // Manually create SsrcGroup to test bounds-checking
+  // in GetSecondarySsrc. We construct an invalid StreamParams
+  // for this.
+  std::vector<uint32_t> fecfr_vector;
+  fecfr_vector.push_back(13);
+  cricket::SsrcGroup invalid_fecfr_group(cricket::kFecFrSsrcGroupSemantics,
+                                         fecfr_vector);
+  cricket::StreamParams sp_invalid;
+  sp_invalid.add_ssrc(13);
+  sp_invalid.ssrc_groups.push_back(invalid_fecfr_group);
+  EXPECT_FALSE(sp_invalid.GetFecFrSsrc(13, &fecfr_ssrc));
+}
+
+// Pins the exact debug-string format of StreamParams::ToString().
+TEST(StreamParams, ToString) {
+  cricket::StreamParams sp =
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+  EXPECT_STREQ("{ssrcs:[1,2];ssrc_groups:{semantics:XYZ;ssrcs:[1,2]};}",
+               sp.ToString().c_str());
+}
+
+// IsOneSsrcStream(): single primary, optionally with RTX and/or FlexFEC.
+TEST(StreamParams, TestIsOneSsrcStream_LegacyStream) {
+  EXPECT_TRUE(
+      cricket::IsOneSsrcStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleFlexfecStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFecFrSsrc(13, 14));
+  EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleFlexfecAndRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFecFrSsrc(13, 14));
+  EXPECT_TRUE(stream.AddFidSsrc(13, 15));
+  EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+// Simulcast streams have multiple primaries, so they are not one-SSRC.
+TEST(StreamParams, TestIsOneSsrcStream_SimulcastStream) {
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(stream));
+}
+
+// IsSimulcastStream(): requires a SIM group with >= 2 SSRCs.
+TEST(StreamParams, TestIsSimulcastStream_LegacyStream) {
+  EXPECT_FALSE(
+      cricket::IsSimulcastStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimulcastStream) {
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_TRUE(cricket::IsSimulcastStream(stream));
+}
+}
+
+// IsSimulcastStream() must reject malformed streams: stray SSRCs, FID
+// groups whose primary is gone, and duplicate SIM groups.
+TEST(StreamParams, TestIsSimulcastStream_InvalidStreams) {
+  // stream1 has extra non-sim, non-fid ssrc.
+  cricket::StreamParams stream1 =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  stream1.add_ssrc(25);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream1));
+
+  // stream2 has invalid fid-group (no primary).
+  cricket::StreamParams stream2;
+  stream2.add_ssrc(13);
+  EXPECT_TRUE(stream2.AddFidSsrc(13, 14));
+  // Actually erase the primary SSRC. A bare std::remove() only shifts
+  // elements and leaves the vector's size unchanged (its return value was
+  // previously discarded), so use the erase-remove idiom.
+  stream2.ssrcs.erase(
+      std::remove(stream2.ssrcs.begin(), stream2.ssrcs.end(), 13u),
+      stream2.ssrcs.end());
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream2));
+
+  // stream3 has two SIM groups.
+  cricket::StreamParams stream3 =
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2));
+  std::vector<uint32_t> sim_ssrcs = MAKE_VECTOR(kRtxSsrcs3);
+  cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, sim_ssrcs);
+  for (size_t i = 0; i < sim_ssrcs.size(); i++) {
+    stream3.add_ssrc(sim_ssrcs[i]);
+  }
+  stream3.ssrc_groups.push_back(sg);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream3));
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h b/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h
new file mode 100644
index 0000000000..fdfbf3440e
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
+#define MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
+
+#include "media/base/fakemediaengine.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// GMock wrapper over the fake video channel: tests can stub/expect
+// GetStats() while inheriting all other fake behavior.
+class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
+ public:
+  MockVideoMediaChannel()
+      : cricket::FakeVideoMediaChannel(nullptr, cricket::VideoOptions()) {}
+  MOCK_METHOD1(GetStats, bool(cricket::VideoMediaInfo*));
+};
+
+// Voice counterpart of MockVideoMediaChannel.
+class MockVoiceMediaChannel : public cricket::FakeVoiceMediaChannel {
+ public:
+  MockVoiceMediaChannel()
+      : cricket::FakeVoiceMediaChannel(nullptr, cricket::AudioOptions()) {}
+  MOCK_METHOD1(GetStats, bool(cricket::VoiceMediaInfo*));
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/testutils.cc b/third_party/libwebrtc/webrtc/media/base/testutils.cc
new file mode 100644
index 0000000000..f92d4013eb
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/testutils.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/testutils.h"
+
+#include <math.h>
+#include <algorithm>
+#include <memory>
+
+#include "api/video/video_frame.h"
+#include "media/base/videocapturer.h"
+#include "rtc_base/bytebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/stream.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/testutils.h"
+
+namespace cricket {
+
+/////////////////////////////////////////////////////////////////////////
+// Implementation of RawRtpPacket
+/////////////////////////////////////////////////////////////////////////
+void RawRtpPacket::WriteToByteBuffer(uint32_t in_ssrc,
+ rtc::ByteBufferWriter* buf) const {
+ if (!buf) return;
+
+ buf->WriteUInt8(ver_to_cc);
+ buf->WriteUInt8(m_to_pt);
+ buf->WriteUInt16(sequence_number);
+ buf->WriteUInt32(timestamp);
+ buf->WriteUInt32(in_ssrc);
+ buf->WriteBytes(payload, sizeof(payload));
+}
+
+bool RawRtpPacket::ReadFromByteBuffer(rtc::ByteBufferReader* buf) {
+ if (!buf) return false;
+
+ bool ret = true;
+ ret &= buf->ReadUInt8(&ver_to_cc);
+ ret &= buf->ReadUInt8(&m_to_pt);
+ ret &= buf->ReadUInt16(&sequence_number);
+ ret &= buf->ReadUInt32(&timestamp);
+ ret &= buf->ReadUInt32(&ssrc);
+ ret &= buf->ReadBytes(payload, sizeof(payload));
+ return ret;
+}
+
+bool RawRtpPacket::SameExceptSeqNumTimestampSsrc(const RawRtpPacket& packet,
+ uint16_t seq,
+ uint32_t ts,
+ uint32_t ssc) const {
+ return sequence_number == seq &&
+ timestamp == ts &&
+ ver_to_cc == packet.ver_to_cc &&
+ m_to_pt == packet.m_to_pt &&
+ ssrc == ssc &&
+ 0 == memcmp(payload, packet.payload, sizeof(payload));
+}
+
+/////////////////////////////////////////////////////////////////////////
+// Implementation of RawRtcpPacket
+/////////////////////////////////////////////////////////////////////////
+void RawRtcpPacket::WriteToByteBuffer(rtc::ByteBufferWriter *buf) const {
+ if (!buf) return;
+
+ buf->WriteUInt8(ver_to_count);
+ buf->WriteUInt8(type);
+ buf->WriteUInt16(length);
+ buf->WriteBytes(payload, sizeof(payload));
+}
+
+bool RawRtcpPacket::ReadFromByteBuffer(rtc::ByteBufferReader* buf) {
+ if (!buf) return false;
+
+ bool ret = true;
+ ret &= buf->ReadUInt8(&ver_to_count);
+ ret &= buf->ReadUInt8(&type);
+ ret &= buf->ReadUInt16(&length);
+ ret &= buf->ReadBytes(payload, sizeof(payload));
+ return ret;
+}
+
+bool RawRtcpPacket::EqualsTo(const RawRtcpPacket& packet) const {
+ return ver_to_count == packet.ver_to_count &&
+ type == packet.type &&
+ length == packet.length &&
+ 0 == memcmp(payload, packet.payload, sizeof(payload));
+}
+
+// Implementation of VideoCapturerListener.
+VideoCapturerListener::VideoCapturerListener(VideoCapturer* capturer)
+ : capturer_(capturer),
+ last_capture_state_(CS_STARTING),
+ frame_count_(0),
+ frame_width_(0),
+ frame_height_(0),
+ resolution_changed_(false) {
+ capturer->SignalStateChange.connect(this,
+ &VideoCapturerListener::OnStateChange);
+ capturer->AddOrUpdateSink(this, rtc::VideoSinkWants());
+}
+
+VideoCapturerListener::~VideoCapturerListener() {
+ capturer_->RemoveSink(this);
+}
+
+void VideoCapturerListener::OnStateChange(VideoCapturer* capturer,
+ CaptureState result) {
+ last_capture_state_ = result;
+}
+
+void VideoCapturerListener::OnFrame(const webrtc::VideoFrame& frame) {
+ ++frame_count_;
+ if (1 == frame_count_) {
+ frame_width_ = frame.width();
+ frame_height_ = frame.height();
+ } else if (frame_width_ != frame.width() || frame_height_ != frame.height()) {
+ resolution_changed_ = true;
+ }
+}
+
+cricket::StreamParams CreateSimStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs) {
+ cricket::StreamParams sp;
+ cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, ssrcs);
+ sp.ssrcs = ssrcs;
+ sp.ssrc_groups.push_back(sg);
+ sp.cname = cname;
+ return sp;
+}
+
+// There should be an rtx_ssrc per ssrc.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs,
+ const std::vector<uint32_t>& rtx_ssrcs) {
+ cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs);
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ sp.ssrcs.push_back(rtx_ssrcs[i]);
+ std::vector<uint32_t> fid_ssrcs;
+ fid_ssrcs.push_back(ssrcs[i]);
+ fid_ssrcs.push_back(rtx_ssrcs[i]);
+ cricket::SsrcGroup fid_group(cricket::kFidSsrcGroupSemantics, fid_ssrcs);
+ sp.ssrc_groups.push_back(fid_group);
+ }
+ return sp;
+}
+
+cricket::StreamParams CreatePrimaryWithFecFrStreamParams(
+ const std::string& cname,
+ uint32_t primary_ssrc,
+ uint32_t flexfec_ssrc) {
+ cricket::StreamParams sp;
+ cricket::SsrcGroup sg(cricket::kFecFrSsrcGroupSemantics,
+ {primary_ssrc, flexfec_ssrc});
+ sp.ssrcs = {primary_ssrc};
+ sp.ssrc_groups.push_back(sg);
+ sp.cname = cname;
+ return sp;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/testutils.h b/third_party/libwebrtc/webrtc/media/base/testutils.h
new file mode 100644
index 0000000000..8ee77c1852
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/testutils.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TESTUTILS_H_
+#define MEDIA_BASE_TESTUTILS_H_
+
+#include <string>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/videocapturer.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/basictypes.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/window.h"
+
+namespace rtc {
+class ByteBufferReader;
+class ByteBufferWriter;
+class StreamInterface;
+}
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace cricket {
+
+// Returns size of I420 image with rounding on chroma for odd sizes.
+#define I420_SIZE(w, h) (w * h + (((w + 1) / 2) * ((h + 1) / 2)) * 2)
+// Returns size of ARGB image.
+#define ARGB_SIZE(w, h) (w * h * 4)
+
+template <class T> inline std::vector<T> MakeVector(const T a[], size_t s) {
+ return std::vector<T>(a, a + s);
+}
+#define MAKE_VECTOR(a) cricket::MakeVector(a, arraysize(a))
+
+struct RtpDumpPacket;
+class RtpDumpWriter;
+
+struct RawRtpPacket {
+ void WriteToByteBuffer(uint32_t in_ssrc, rtc::ByteBufferWriter* buf) const;
+ bool ReadFromByteBuffer(rtc::ByteBufferReader* buf);
+  // Check if this packet is the same as the specified packet except the
+  // sequence number, timestamp and SSRC, which should be the same as the
+  // specified parameters.
+ bool SameExceptSeqNumTimestampSsrc(const RawRtpPacket& packet,
+ uint16_t seq,
+ uint32_t ts,
+ uint32_t ssc) const;
+ int size() const { return 28; }
+
+ uint8_t ver_to_cc;
+ uint8_t m_to_pt;
+ uint16_t sequence_number;
+ uint32_t timestamp;
+ uint32_t ssrc;
+ char payload[16];
+};
+
+struct RawRtcpPacket {
+ void WriteToByteBuffer(rtc::ByteBufferWriter* buf) const;
+ bool ReadFromByteBuffer(rtc::ByteBufferReader* buf);
+ bool EqualsTo(const RawRtcpPacket& packet) const;
+
+ uint8_t ver_to_count;
+ uint8_t type;
+ uint16_t length;
+ char payload[16];
+};
+
+// Test helper for testing VideoCapturer implementations.
+class VideoCapturerListener
+ : public sigslot::has_slots<>,
+ public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ explicit VideoCapturerListener(VideoCapturer* cap);
+ ~VideoCapturerListener();
+
+ CaptureState last_capture_state() const { return last_capture_state_; }
+ int frame_count() const { return frame_count_; }
+ int frame_width() const { return frame_width_; }
+ int frame_height() const { return frame_height_; }
+ bool resolution_changed() const { return resolution_changed_; }
+
+ void OnStateChange(VideoCapturer* capturer, CaptureState state);
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+ VideoCapturer* capturer_;
+ CaptureState last_capture_state_;
+ int frame_count_;
+ int frame_width_;
+ int frame_height_;
+ bool resolution_changed_;
+};
+
+class VideoMediaErrorCatcher : public sigslot::has_slots<> {
+ public:
+ VideoMediaErrorCatcher() : ssrc_(0), error_(VideoMediaChannel::ERROR_NONE) { }
+ uint32_t ssrc() const { return ssrc_; }
+ VideoMediaChannel::Error error() const { return error_; }
+ void OnError(uint32_t ssrc, VideoMediaChannel::Error error) {
+ ssrc_ = ssrc;
+ error_ = error;
+ }
+ private:
+ uint32_t ssrc_;
+ VideoMediaChannel::Error error_;
+};
+
+// Checks whether |codecs| contains |codec|; checks using Codec::Matches().
+template <class C>
+bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
+ typename std::vector<C>::const_iterator it;
+ for (it = codecs.begin(); it != codecs.end(); ++it) {
+ if (it->Matches(codec)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Create Simulcast StreamParams with given |ssrcs| and |cname|.
+cricket::StreamParams CreateSimStreamParams(const std::string& cname,
+ const std::vector<uint32_t>& ssrcs);
+// Create Simulcast stream with given |ssrcs| and |rtx_ssrcs|.
+// The number of |rtx_ssrcs| must match number of |ssrcs|.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs,
+ const std::vector<uint32_t>& rtx_ssrcs);
+
+// Create StreamParams with single primary SSRC and corresponding FlexFEC SSRC.
+cricket::StreamParams CreatePrimaryWithFecFrStreamParams(
+ const std::string& cname,
+ uint32_t primary_ssrc,
+ uint32_t flexfec_ssrc);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_TESTUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils.cc b/third_party/libwebrtc/webrtc/media/base/turnutils.cc
new file mode 100644
index 0000000000..cf258042e8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/turnutils.h"
+
+#include "p2p/base/stun.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/checks.h"
+
+namespace cricket {
+
+namespace {
+
+const size_t kTurnChannelHeaderLength = 4;
+
+bool IsTurnChannelData(const uint8_t* data, size_t length) {
+ return length >= kTurnChannelHeaderLength && ((*data & 0xC0) == 0x40);
+}
+
+bool IsTurnSendIndicationPacket(const uint8_t* data, size_t length) {
+ if (length < kStunHeaderSize) {
+ return false;
+ }
+
+ uint16_t type = rtc::GetBE16(data);
+ return (type == TURN_SEND_INDICATION);
+}
+
+} // namespace
+
+bool UnwrapTurnPacket(const uint8_t* packet,
+ size_t packet_size,
+ size_t* content_position,
+ size_t* content_size) {
+ if (IsTurnChannelData(packet, packet_size)) {
+ // Turn Channel Message header format.
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Channel Number | Length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | |
+ // / Application Data /
+ // / /
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ size_t length = rtc::GetBE16(&packet[2]);
+ if (length + kTurnChannelHeaderLength > packet_size) {
+ return false;
+ }
+
+ *content_position = kTurnChannelHeaderLength;
+ *content_size = length;
+ return true;
+ }
+
+ if (IsTurnSendIndicationPacket(packet, packet_size)) {
+ // Validate STUN message length.
+ const size_t stun_message_length = rtc::GetBE16(&packet[2]);
+ if (stun_message_length + kStunHeaderSize != packet_size) {
+ return false;
+ }
+
+ // First skip mandatory stun header which is of 20 bytes.
+ size_t pos = kStunHeaderSize;
+ // Loop through STUN attributes until we find STUN DATA attribute.
+ while (pos < packet_size) {
+ // Keep reading STUN attributes until we hit DATA attribute.
+ // Attribute will be a TLV structure.
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Type | Length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Value (variable) ....
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // The value in the length field MUST contain the length of the Value
+ // part of the attribute, prior to padding, measured in bytes. Since
+ // STUN aligns attributes on 32-bit boundaries, attributes whose content
+ // is not a multiple of 4 bytes are padded with 1, 2, or 3 bytes of
+ // padding so that its value contains a multiple of 4 bytes. The
+ // padding bits are ignored, and may be any value.
+ uint16_t attr_type, attr_length;
+ const int kAttrHeaderLength = sizeof(attr_type) + sizeof(attr_length);
+
+ if (packet_size < pos + kAttrHeaderLength) {
+ return false;
+ }
+
+ // Getting attribute type and length.
+ attr_type = rtc::GetBE16(&packet[pos]);
+ attr_length = rtc::GetBE16(&packet[pos + sizeof(attr_type)]);
+
+ pos += kAttrHeaderLength; // Skip STUN_DATA_ATTR header.
+
+ // Checking for bogus attribute length.
+ if (pos + attr_length > packet_size) {
+ return false;
+ }
+
+ if (attr_type == STUN_ATTR_DATA) {
+ *content_position = pos;
+ *content_size = attr_length;
+ return true;
+ }
+
+ pos += attr_length;
+ if ((attr_length % 4) != 0) {
+ pos += (4 - (attr_length % 4));
+ }
+ }
+
+ // There is no data attribute present in the message.
+ return false;
+ }
+
+ // This is not a TURN packet.
+ *content_position = 0;
+ *content_size = packet_size;
+ return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils.h b/third_party/libwebrtc/webrtc/media/base/turnutils.h
new file mode 100644
index 0000000000..13ed26b7a5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TURNUTILS_H_
+#define MEDIA_BASE_TURNUTILS_H_
+
+#include <cstddef>
+#include <cstdint>
+
+namespace cricket {
+
+struct PacketOptions;
+
+// Finds data location within a TURN Channel Message or TURN Send Indication
+// message.
+bool UnwrapTurnPacket(const uint8_t* packet,
+ size_t packet_size,
+ size_t* content_position,
+ size_t* content_size);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_TURNUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc b/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc
new file mode 100644
index 0000000000..ca1282760b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/turnutils.h"
+
+#include <stddef.h>
+
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
+// Invalid TURN send indication messages. Messages are proper STUN
+// messages with incorrect values in attributes.
+TEST(TurnUtilsTest, InvalidTurnSendIndicationMessages) {
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+
+ // Stun Indication message with Zero length
+ uint8_t kTurnSendIndicationMsgWithNoAttributes[] = {
+ 0x00, 0x16, 0x00, 0x00, // Zero length
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ };
+ EXPECT_FALSE(UnwrapTurnPacket(kTurnSendIndicationMsgWithNoAttributes,
+ sizeof(kTurnSendIndicationMsgWithNoAttributes),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+
+ // Stun Send Indication message with invalid length in stun header.
+ const uint8_t kTurnSendIndicationMsgWithInvalidLength[] = {
+ 0x00, 0x16, 0xFF, 0x00, // length of 0xFF00
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ };
+ EXPECT_FALSE(UnwrapTurnPacket(kTurnSendIndicationMsgWithInvalidLength,
+ sizeof(kTurnSendIndicationMsgWithInvalidLength),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+
+ // Stun Send Indication message with no DATA attribute in message.
+ const uint8_t kTurnSendIndicatinMsgWithNoDataAttribute[] = {
+    0x00, 0x16, 0x00, 0x08, // length of 8 (one 8-byte attribute)
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_FALSE(
+ UnwrapTurnPacket(kTurnSendIndicatinMsgWithNoDataAttribute,
+ sizeof(kTurnSendIndicatinMsgWithNoDataAttribute),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+}
+
+// Valid TURN Send Indication messages.
+TEST(TurnUtilsTest, ValidTurnSendIndicationMessage) {
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+ // A valid STUN indication message with a valid RTP header in data attribute
+ // payload field and no extension bit set.
+ const uint8_t kTurnSendIndicationMsgWithoutRtpExtension[] = {
+    0x00, 0x16, 0x00, 0x18, // length of 24 (attributes below)
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x13, 0x00, 0x0C, // Data attribute.
+ 0x80, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_TRUE(UnwrapTurnPacket(
+ kTurnSendIndicationMsgWithoutRtpExtension,
+ sizeof(kTurnSendIndicationMsgWithoutRtpExtension), &content_pos,
+ &content_size));
+ EXPECT_EQ(12U, content_size);
+ EXPECT_EQ(32U, content_pos);
+}
+
+// Verify parsing of valid TURN Channel messages.
+TEST(TurnUtilsTest, ValidTurnChannelMessages) {
+ const uint8_t kTurnChannelMsgWithRtpPacket[] = {
+ 0x40, 0x00, 0x00, 0x0C,
+ 0x80, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+
+ size_t content_pos = 0, content_size = 0;
+ EXPECT_TRUE(UnwrapTurnPacket(
+ kTurnChannelMsgWithRtpPacket,
+ sizeof(kTurnChannelMsgWithRtpPacket), &content_pos, &content_size));
+ EXPECT_EQ(12U, content_size);
+ EXPECT_EQ(4U, content_pos);
+}
+
+TEST(TurnUtilsTest, ChannelMessageZeroLength) {
+ const uint8_t kTurnChannelMsgWithZeroLength[] = {0x40, 0x00, 0x00, 0x00};
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+ EXPECT_TRUE(UnwrapTurnPacket(kTurnChannelMsgWithZeroLength,
+ sizeof(kTurnChannelMsgWithZeroLength),
+ &content_pos, &content_size));
+ EXPECT_EQ(4, content_pos);
+ EXPECT_EQ(0, content_size);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter.cc b/third_party/libwebrtc/webrtc/media/base/videoadapter.cc
new file mode 100644
index 0000000000..8756c15a25
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter.cc
@@ -0,0 +1,293 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videoadapter.h"
+
+#include <algorithm>
+#include <cmath>
+#include <cstdlib>
+#include <limits>
+#include <utility>
+
+#include "api/optional.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace {
+struct Fraction {
+ int numerator;
+ int denominator;
+
+ // Determines number of output pixels if both width and height of an input of
+ // |input_pixels| pixels is scaled with the fraction numerator / denominator.
+ int scale_pixel_count(int input_pixels) {
+ return (numerator * numerator * input_pixels) / (denominator * denominator);
+ }
+};
+
+// Round |value_to_round| to a multiple of |multiple|. Prefer rounding upwards,
+// but never more than |max_value|.
+int roundUp(int value_to_round, int multiple, int max_value) {
+ const int rounded_value =
+ (value_to_round + multiple - 1) / multiple * multiple;
+ return rounded_value <= max_value ? rounded_value
+ : (max_value / multiple * multiple);
+}
+
+// Generates a scale factor that makes |input_pixels| close to |target_pixels|,
+// but no higher than |max_pixels|.
+Fraction FindScale(int input_pixels, int target_pixels, int max_pixels) {
+ // This function only makes sense for a positive target.
+ RTC_DCHECK_GT(target_pixels, 0);
+ RTC_DCHECK_GT(max_pixels, 0);
+ RTC_DCHECK_GE(max_pixels, target_pixels);
+
+ // Don't scale up original.
+ if (target_pixels >= input_pixels)
+ return Fraction{1, 1};
+
+ Fraction current_scale = Fraction{1, 1};
+ Fraction best_scale = Fraction{1, 1};
+ // The minimum (absolute) difference between the number of output pixels and
+ // the target pixel count.
+ int min_pixel_diff = std::numeric_limits<int>::max();
+ if (input_pixels <= max_pixels) {
+ // Start condition for 1/1 case, if it is less than max.
+ min_pixel_diff = std::abs(input_pixels - target_pixels);
+ }
+
+ // Alternately scale down by 2/3 and 3/4. This results in fractions which are
+ // effectively scalable. For instance, starting at 1280x720 will result in
+ // the series (3/4) => 960x540, (1/2) => 640x360, (3/8) => 480x270,
+// (1/4) => 320x180, (3/16) => 240x135, (1/8) => 160x90.
+ while (current_scale.scale_pixel_count(input_pixels) > target_pixels) {
+ if (current_scale.numerator % 3 == 0 &&
+ current_scale.denominator % 2 == 0) {
+ // Multiply by 2/3.
+ current_scale.numerator /= 3;
+ current_scale.denominator /= 2;
+ } else {
+ // Multiply by 3/4.
+ current_scale.numerator *= 3;
+ current_scale.denominator *= 4;
+ }
+
+ int output_pixels = current_scale.scale_pixel_count(input_pixels);
+ if (output_pixels <= max_pixels) {
+ int diff = std::abs(target_pixels - output_pixels);
+ if (diff < min_pixel_diff) {
+ min_pixel_diff = diff;
+ best_scale = current_scale;
+ }
+ }
+ }
+
+ return best_scale;
+}
+} // namespace
+
+namespace cricket {
+
+VideoAdapter::VideoAdapter(int required_resolution_alignment)
+ : frames_in_(0),
+ frames_out_(0),
+ frames_scaled_(0),
+ adaption_changes_(0),
+ previous_width_(0),
+ previous_height_(0),
+ required_resolution_alignment_(required_resolution_alignment),
+ resolution_request_target_pixel_count_(std::numeric_limits<int>::max()),
+ resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
+ max_framerate_request_(std::numeric_limits<int>::max()) {}
+
+VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
+
+VideoAdapter::~VideoAdapter() {}
+
+bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
+ rtc::CritScope cs(&critical_section_);
+ if (max_framerate_request_ <= 0)
+ return false;
+
+ int64_t frame_interval_ns =
+ requested_format_ ? requested_format_->interval : 0;
+
+ // If |max_framerate_request_| is not set, it will default to maxint, which
+ // will lead to a frame_interval_ns rounded to 0.
+ frame_interval_ns = std::max<int64_t>(
+ frame_interval_ns, rtc::kNumNanosecsPerSec / max_framerate_request_);
+
+ if (frame_interval_ns <= 0) {
+ // Frame rate throttling not enabled.
+ return true;
+ }
+
+ if (next_frame_timestamp_ns_) {
+ // Time until next frame should be outputted.
+ const int64_t time_until_next_frame_ns =
+ (*next_frame_timestamp_ns_ - in_timestamp_ns);
+
+ // Continue if timestamp is within expected range.
+ if (std::abs(time_until_next_frame_ns) < 2 * frame_interval_ns) {
+ // Drop if a frame shouldn't be outputted yet.
+ if (time_until_next_frame_ns > 0)
+ return false;
+ // Time to output new frame.
+ *next_frame_timestamp_ns_ += frame_interval_ns;
+ return true;
+ }
+ }
+
+ // First timestamp received or timestamp is way outside expected range, so
+ // reset. Set first timestamp target to just half the interval to prefer
+ // keeping frames in case of jitter.
+ next_frame_timestamp_ns_ = in_timestamp_ns + frame_interval_ns / 2;
+ return true;
+}
+
+bool VideoAdapter::AdaptFrameResolution(int in_width,
+ int in_height,
+ int64_t in_timestamp_ns,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height) {
+ rtc::CritScope cs(&critical_section_);
+ ++frames_in_;
+
+ // The max output pixel count is the minimum of the requests from
+ // OnOutputFormatRequest and OnResolutionRequest.
+ int max_pixel_count = resolution_request_max_pixel_count_;
+ if (scale_) {
+ // We calculate the scaled pixel count from the in_width and in_height,
+ // which is the input resolution. We then take the minimum of the scaled
+ // resolution and the current max_pixel_count. This will allow the
+ // quality scaler to reduce the resolution in response to load, but we
+ // will never go above the requested scaled resolution.
+ int scaled_pixel_count = (in_width*in_height/scale_resolution_by_)/scale_resolution_by_;
+ max_pixel_count = std::min(max_pixel_count, scaled_pixel_count);
+ }
+
+ if (requested_format_) {
+ max_pixel_count = std::min(
+ max_pixel_count, requested_format_->width * requested_format_->height);
+ }
+ int target_pixel_count =
+ std::min(resolution_request_target_pixel_count_, max_pixel_count);
+
+ // Drop the input frame if necessary.
+ if (max_pixel_count <= 0 || !KeepFrame(in_timestamp_ns)) {
+ // Show VAdapt log every 90 frames dropped. (3 seconds)
+ if ((frames_in_ - frames_out_) % 90 == 0) {
+ // TODO(fbarchard): Reduce to LS_VERBOSE when adapter info is not needed
+ // in default calls.
+ RTC_LOG(LS_INFO) << "VAdapt Drop Frame: scaled " << frames_scaled_
+ << " / out " << frames_out_ << " / in " << frames_in_
+ << " Changes: " << adaption_changes_
+ << " Input: " << in_width << "x" << in_height
+ << " timestamp: " << in_timestamp_ns << " Output: i"
+ << (requested_format_ ? requested_format_->interval : 0);
+ }
+
+ // Drop frame.
+ return false;
+ }
+
+ // Calculate how the input should be cropped.
+ if (!requested_format_ ||
+ requested_format_->width == 0 || requested_format_->height == 0) {
+ *cropped_width = in_width;
+ *cropped_height = in_height;
+ } else {
+ // Adjust |requested_format_| orientation to match input.
+ if ((in_width > in_height) !=
+ (requested_format_->width > requested_format_->height)) {
+ std::swap(requested_format_->width, requested_format_->height);
+ }
+ const float requested_aspect =
+ requested_format_->width /
+ static_cast<float>(requested_format_->height);
+ *cropped_width =
+ std::min(in_width, static_cast<int>(in_height * requested_aspect));
+ *cropped_height =
+ std::min(in_height, static_cast<int>(in_width / requested_aspect));
+ }
+ const Fraction scale = FindScale((*cropped_width) * (*cropped_height),
+ target_pixel_count, max_pixel_count);
+ // Adjust cropping slightly to get even integer output size and a perfect
+ // scale factor. Make sure the resulting dimensions are aligned correctly
+ // to be nice to hardware encoders.
+ *cropped_width =
+ roundUp(*cropped_width,
+ scale.denominator * required_resolution_alignment_, in_width);
+ *cropped_height =
+ roundUp(*cropped_height,
+ scale.denominator * required_resolution_alignment_, in_height);
+ RTC_DCHECK_EQ(0, *cropped_width % scale.denominator);
+ RTC_DCHECK_EQ(0, *cropped_height % scale.denominator);
+
+ // Calculate final output size.
+ *out_width = *cropped_width / scale.denominator * scale.numerator;
+ *out_height = *cropped_height / scale.denominator * scale.numerator;
+ RTC_DCHECK_EQ(0, *out_width % required_resolution_alignment_);
+ RTC_DCHECK_EQ(0, *out_height % required_resolution_alignment_);
+
+ ++frames_out_;
+ if (scale.numerator != scale.denominator)
+ ++frames_scaled_;
+
+ if ((previous_width_ || scale_) && (previous_width_ != *out_width ||
+ previous_height_ != *out_height)) {
+ ++adaption_changes_;
+ RTC_LOG(LS_INFO) << "Frame size changed: scaled " << frames_scaled_
+ << " / out " << frames_out_ << " / in " << frames_in_
+ << " Changes: " << adaption_changes_
+ << " Input: " << in_width << "x" << in_height
+ << " Scale: " << scale.numerator << "/"
+ << scale.denominator << " Output: " << *out_width << "x"
+ << *out_height << " i"
+ << (requested_format_ ? requested_format_->interval : 0);
+ }
+
+ previous_width_ = *out_width;
+ previous_height_ = *out_height;
+
+ return true;
+}
+
+void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
+ rtc::CritScope cs(&critical_section_);
+ requested_format_ = format;
+ next_frame_timestamp_ns_ = rtc::nullopt;
+}
+
+void VideoAdapter::OnResolutionFramerateRequest(
+ const rtc::Optional<int>& target_pixel_count,
+ int max_pixel_count,
+ int max_framerate_fps) {
+ rtc::CritScope cs(&critical_section_);
+ resolution_request_max_pixel_count_ = max_pixel_count;
+ resolution_request_target_pixel_count_ =
+ target_pixel_count.value_or(resolution_request_max_pixel_count_);
+ max_framerate_request_ = max_framerate_fps;
+}
+
+void VideoAdapter::OnScaleResolutionBy(
+ rtc::Optional<float> scale_resolution_by) {
+ rtc::CritScope cs(&critical_section_);
+ scale_resolution_by_ = scale_resolution_by.value_or(1.0);
+ RTC_DCHECK_GE(scale_resolution_by_, 1.0);
+ scale_ = static_cast<bool>(scale_resolution_by);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter.h b/third_party/libwebrtc/webrtc/media/base/videoadapter.h
new file mode 100644
index 0000000000..e7c9fa6830
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOADAPTER_H_
+#define MEDIA_BASE_VIDEOADAPTER_H_
+
+#include "api/optional.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/criticalsection.h"
+
+namespace cricket {
+
+// VideoAdapter adapts an input video frame to an output frame based on the
+// specified input and output formats. The adaptation includes dropping frames
+// to reduce frame rate and scaling frames.
+// VideoAdapter is thread safe.
+class VideoAdapter {
+ public:
+ VideoAdapter();
+ // The output frames will have height and width that is divisible by
+ // |required_resolution_alignment|.
+ explicit VideoAdapter(int required_resolution_alignment);
+ virtual ~VideoAdapter();
+
+ // Return the adapted resolution and cropping parameters given the
+ // input resolution. The input frame should first be cropped, then
+ // scaled to the final output resolution. Returns true if the frame
+ // should be adapted, and false if it should be dropped.
+ bool AdaptFrameResolution(int in_width,
+ int in_height,
+ int64_t in_timestamp_ns,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height);
+
+ // Requests the output frame size and frame interval from
+ // |AdaptFrameResolution| to not be larger than |format|. Also, the input
+ // frame size will be cropped to match the requested aspect ratio. The
+ // requested aspect ratio is orientation agnostic and will be adjusted to
+ // maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
+ // 720x1280 is requested.
+ void OnOutputFormatRequest(const VideoFormat& format);
+
+ // Requests the output frame size from |AdaptFrameResolution| to have as close
+ // as possible to |target_pixel_count| pixels (if set) but no more than
+ // |max_pixel_count|.
+ // |max_framerate_fps| is essentially analogous to |max_pixel_count|, but for
+ // framerate rather than resolution.
+  // Set |max_pixel_count| and/or |max_framerate_fps| to
+  // std::numeric_limits<int>::max() if no upper limit is desired.
+ void OnResolutionFramerateRequest(
+ const rtc::Optional<int>& target_pixel_count,
+ int max_pixel_count,
+ int max_framerate_fps);
+
+ // Requests the output frame size from |AdaptFrameResolution| be scaled
+ // down from the input by a factor of scale_resolution_by (min 1.0)
+ virtual void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by);
+
+ private:
+ // Determine if frame should be dropped based on input fps and requested fps.
+ bool KeepFrame(int64_t in_timestamp_ns);
+
+ int frames_in_; // Number of input frames.
+ int frames_out_; // Number of output frames.
+ int frames_scaled_; // Number of frames scaled.
+ int adaption_changes_; // Number of changes in scale factor.
+ int previous_width_; // Previous adapter output width.
+ int previous_height_; // Previous adapter output height.
+ // Resolution must be divisible by this factor.
+ const int required_resolution_alignment_;
+ // The target timestamp for the next frame based on requested format.
+ rtc::Optional<int64_t> next_frame_timestamp_ns_
+ RTC_GUARDED_BY(critical_section_);
+
+  // Max number of pixels requested via calls to OnOutputFormatRequest,
+  // OnResolutionFramerateRequest respectively.
+ // The adapted output format is the minimum of these.
+ rtc::Optional<VideoFormat> requested_format_
+ RTC_GUARDED_BY(critical_section_);
+ int resolution_request_target_pixel_count_ RTC_GUARDED_BY(critical_section_);
+ int resolution_request_max_pixel_count_ RTC_GUARDED_BY(critical_section_);
+ int max_framerate_request_ RTC_GUARDED_BY(critical_section_);
+ float scale_resolution_by_ RTC_GUARDED_BY(critical_section_);
+ bool scale_ RTC_GUARDED_BY(critical_section_);
+
+ // The critical section to protect the above variables.
+ rtc::CriticalSection critical_section_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter);
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOADAPTER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc
new file mode 100644
index 0000000000..039d1da636
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc
@@ -0,0 +1,1096 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits.h> // For INT_MAX
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/fakevideocapturer.h"
+#include "media/base/mediachannel.h"
+#include "media/base/testutils.h"
+#include "media/base/videoadapter.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+
+namespace cricket {
+namespace {
+const int kDefaultFps = 30;
+} // namespace
+
+class VideoAdapterTest : public testing::Test {
+ public:
+ virtual void SetUp() {
+ capturer_.reset(new FakeVideoCapturer);
+ capture_format_ = capturer_->GetSupportedFormats()->at(0);
+ capture_format_.interval = VideoFormat::FpsToInterval(kDefaultFps);
+
+ listener_.reset(new VideoCapturerListener(&adapter_));
+ capturer_->AddOrUpdateSink(listener_.get(), rtc::VideoSinkWants());
+ }
+
+ virtual void TearDown() {
+    // Explicitly disconnect the VideoCapturer before teardown to avoid data
+    // races (frames delivered to VideoCapturerListener while it's destructed).
+ capturer_->RemoveSink(listener_.get());
+ }
+
+ protected:
+ class VideoCapturerListener
+ : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ struct Stats {
+ int captured_frames;
+ int dropped_frames;
+ bool last_adapt_was_no_op;
+
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
+ };
+
+ explicit VideoCapturerListener(VideoAdapter* adapter)
+ : video_adapter_(adapter),
+ cropped_width_(0),
+ cropped_height_(0),
+ out_width_(0),
+ out_height_(0),
+ captured_frames_(0),
+ dropped_frames_(0),
+ last_adapt_was_no_op_(false) {}
+
+ void OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope lock(&crit_);
+ const int in_width = frame.width();
+ const int in_height = frame.height();
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
+ if (video_adapter_->AdaptFrameResolution(
+ in_width, in_height,
+ frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
+ &cropped_width, &cropped_height, &out_width, &out_height)) {
+ cropped_width_ = cropped_width;
+ cropped_height_ = cropped_height;
+ out_width_ = out_width;
+ out_height_ = out_height;
+ last_adapt_was_no_op_ =
+ (in_width == cropped_width && in_height == cropped_height &&
+ in_width == out_width && in_height == out_height);
+ } else {
+ ++dropped_frames_;
+ }
+ ++captured_frames_;
+ }
+
+ Stats GetStats() {
+ rtc::CritScope lock(&crit_);
+ Stats stats;
+ stats.captured_frames = captured_frames_;
+ stats.dropped_frames = dropped_frames_;
+ stats.last_adapt_was_no_op = last_adapt_was_no_op_;
+ stats.cropped_width = cropped_width_;
+ stats.cropped_height = cropped_height_;
+ stats.out_width = out_width_;
+ stats.out_height = out_height_;
+ return stats;
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ VideoAdapter* video_adapter_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
+ int captured_frames_;
+ int dropped_frames_;
+ bool last_adapt_was_no_op_;
+ };
+
+
+ void VerifyAdaptedResolution(const VideoCapturerListener::Stats& stats,
+ int cropped_width,
+ int cropped_height,
+ int out_width,
+ int out_height) {
+ EXPECT_EQ(cropped_width, stats.cropped_width);
+ EXPECT_EQ(cropped_height, stats.cropped_height);
+ EXPECT_EQ(out_width, stats.out_width);
+ EXPECT_EQ(out_height, stats.out_height);
+ }
+
+ std::unique_ptr<FakeVideoCapturer> capturer_;
+ VideoAdapter adapter_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
+ std::unique_ptr<VideoCapturerListener> listener_;
+ VideoFormat capture_format_;
+};
+
+// Do not adapt the frame rate or the resolution. Expect no frame drop, no
+// cropping, and no resolution change.
+TEST_F(VideoAdapterTest, AdaptNothing) {
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop and no resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+ EXPECT_TRUE(stats.last_adapt_was_no_op);
+}
+
+TEST_F(VideoAdapterTest, AdaptZeroInterval) {
+ VideoFormat format = capturer_->GetSupportedFormats()->at(0);
+ format.interval = 0;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no crash and that frames aren't dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+}
+
+// Adapt the frame rate to be half of the capture rate at the beginning. Expect
+// the number of dropped frames to be half of the number of captured frames.
+TEST_F(VideoAdapterTest, AdaptFramerateToHalf) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval *= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+
+ // Capture 10 frames and verify that every other frame is dropped. The first
+ // frame should not be dropped.
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 1);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 2);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 3);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 4);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 5);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 6);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 7);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 8);
+ EXPECT_EQ(4, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 9);
+ EXPECT_EQ(4, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 10);
+ EXPECT_EQ(5, listener_->GetStats().dropped_frames);
+}
+
+// Adapt the frame rate to be two thirds of the capture rate at the beginning.
+// Expect the number of dropped frames to be one third of the number of
+// captured frames.
+TEST_F(VideoAdapterTest, AdaptFramerateToTwoThirds) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval = request_format.interval * 3 / 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+
+ // Capture 10 frames and verify that every third frame is dropped. The first
+ // frame should not be dropped.
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 1);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 2);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 3);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 4);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 5);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 6);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 7);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 8);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 9);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 10);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+}
+
+// Request frame rate twice as high as captured frame rate. Expect no frame
+// drop.
+TEST_F(VideoAdapterTest, AdaptFramerateHighLimit) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop.
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+}
+
+// After the first timestamp, add a big offset to the timestamps. Expect that
+// the adapter is conservative and resets to the new offset and does not drop
+// any frame.
+TEST_F(VideoAdapterTest, AdaptFramerateTimestampOffset) {
+ const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
+
+ const int64_t first_timestamp = 0;
+ adapter_.AdaptFrameResolution(640, 480, first_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ const int64_t big_offset = -987654321LL * 1000;
+ const int64_t second_timestamp = big_offset;
+ adapter_.AdaptFrameResolution(640, 480, second_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ const int64_t third_timestamp = big_offset + capture_interval;
+ adapter_.AdaptFrameResolution(640, 480, third_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+}
+
+// Request 30 fps and send 30 fps with jitter. Expect that no frame is dropped.
+TEST_F(VideoAdapterTest, AdaptFramerateTimestampJitter) {
+ const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 0 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 10 / 10 - 1,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 25 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 30 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 35 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 50 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+}
+
+// Adapt the frame rate to be half of the capture rate after capturing no less
+// than 10 frames. Expect no frames dropped before adaptation and frames
+// dropped after adaptation.
+TEST_F(VideoAdapterTest, AdaptFramerateOntheFly) {
+ VideoFormat request_format = capture_format_;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop before adaptation.
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+  // Adapt the frame rate.
+ request_format.interval *= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+
+ for (int i = 0; i < 20; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify frame drop after adaptation.
+ EXPECT_GT(listener_->GetStats().dropped_frames, 0);
+}
+
+// Do not adapt the frame rate or the resolution. Expect no frame drop, no
+// cropping, and no resolution change.
+TEST_F(VideoAdapterTest, OnFramerateRequestMax) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop and no resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+ EXPECT_TRUE(stats.last_adapt_was_no_op);
+}
+
+TEST_F(VideoAdapterTest, OnFramerateRequestZero) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(), 0);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+  // Verify no crash and that all frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(10, stats.dropped_frames);
+}
+
+// Adapt the frame rate to be half of the capture rate at the beginning. Expect
+// the number of dropped frames to be half of the number of captured frames.
+TEST_F(VideoAdapterTest, OnFramerateRequestHalf) {
+ adapter_.OnResolutionFramerateRequest(
+ rtc::nullopt, std::numeric_limits<int>::max(), kDefaultFps / 2);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+  // Verify no crash and that half of the frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(5, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+}
+
+// Set a very high output pixel resolution. Expect no cropping or resolution
+// change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionHighLimit) {
+ VideoFormat output_format = capture_format_;
+ output_format.width *= 10;
+ output_format.height *= 10;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
+}
+
+// Adapt the frame resolution to be the same as capture resolution. Expect no
+// cropping or resolution change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionIdentical) {
+ adapter_.OnOutputFormatRequest(capture_format_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution. Expect
+// no cropping, but a resolution change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionQuarter) {
+ VideoFormat request_format = capture_format_;
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(request_format.width, out_width_);
+ EXPECT_EQ(request_format.height, out_height_);
+}
+
+// Adapt the pixel resolution to 0. Expect frame drop.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionDrop) {
+ VideoFormat output_format = capture_format_;
+ output_format.width = 0;
+ output_format.height = 0;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution at the
+// beginning. Expect no cropping but a resolution change.
+TEST_F(VideoAdapterTest, AdaptResolution) {
+ VideoFormat request_format = capture_format_;
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop, no cropping, and resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution after
+// capturing no less than 10 frames. Expect no resolution change before
+// adaptation and resolution change after adaptation.
+TEST_F(VideoAdapterTest, AdaptResolutionOnTheFly) {
+ VideoFormat request_format = capture_format_;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no resolution change before adaptation.
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+
+ // Adapt the frame resolution.
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify resolution change after adaptation.
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+}
+
+// Drop all frames.
+TEST_F(VideoAdapterTest, DropAllFrames) {
+ VideoFormat format; // with resolution 0x0.
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify all frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(stats.captured_frames, stats.dropped_frames);
+}
+
+TEST_F(VideoAdapterTest, TestOnOutputFormatRequest) {
+ VideoFormat format(640, 400, 0, 0);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
+
+ // Format request 640x400.
+ format.height = 400;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
+
+ // Request 1280x720, higher than input, but aspect 16:9. Expect cropping but
+ // no scaling.
+ format.width = 1280;
+ format.height = 720;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Request 0x0.
+ format.width = 0;
+ format.height = 0;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ // Request 320x200. Expect scaling, but no cropping.
+ format.width = 320;
+ format.height = 200;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Request resolution close to 2/3 scale. Expect adapt down. Scaling to 2/3
+ // is not optimized and not allowed, therefore 1/2 scaling will be used
+ // instead.
+ format.width = 424;
+ format.height = 265;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Request resolution of 3 / 8. Expect adapt down.
+ format.width = 640 * 3 / 8;
+ format.height = 400 * 3 / 8;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640 * 3 / 8, out_width_);
+ EXPECT_EQ(400 * 3 / 8, out_height_);
+
+ // Switch back up. Expect adapt.
+ format.width = 320;
+ format.height = 200;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Format request 480x300.
+ format.width = 480;
+ format.height = 300;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(300, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestViewRequestPlusCameraSwitch) {
+ // Start at HD.
+ VideoFormat format(1280, 720, 0, 0);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ // Format request for VGA.
+ format.width = 640;
+ format.height = 360;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Now, the camera reopens at VGA.
+ // Both the frame and the output format should be 640x360.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // And another view request comes in for 640x360, which should have no
+ // real impact.
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestVGAWidth) {
+  // Requested output format is 640x360.
+ VideoFormat format(640, 360, 0, FOURCC_I420);
+ adapter_.OnOutputFormatRequest(format);
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ // Expect cropping.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // But if frames come in at 640x360, we shouldn't adapt them down.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ // Adapt down one step.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 1280 * 720 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 960 * 540 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt up one step.
+ adapter_.OnResolutionFramerateRequest(640 * 360,
+ 960 * 540,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(960 * 540,
+ 1280 * 720,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(1280 * 720,
+ 1920 * 1080,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 0,
+ std::numeric_limits<int>::max());
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
+ // Large step down.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Large step up.
+ adapter_.OnResolutionFramerateRequest(1280 * 720, 1920 * 1080,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ VideoFormat new_format(640, 360, 0, FOURCC_I420);
+ adapter_.OnOutputFormatRequest(new_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 960 * 720,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
+ // Ask for 640x360 (16:9 aspect).
+ adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
+ // Send 640x480 (4:3 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt down one step.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 480 * 270 - 1,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 1/2 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(180, out_height_);
+
+ // Adapt up one step.
+ adapter_.OnResolutionFramerateRequest(480 * 270, 640 * 360,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(640 * 360, 960 * 540,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Try to adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(960 * 540, 1280 * 720,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingOddResolution) {
+ // Ask for 640x360 (16:9 aspect), with 3/16 scaling.
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 360, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ 640 * 360 * 3 / 16 * 3 / 16,
+ std::numeric_limits<int>::max());
+
+ // Send 640x480 (4:3 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ // Instead of getting the exact aspect ratio with cropped resolution 640x360,
+ // the resolution should be adjusted to get a perfect scale factor instead.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(368, cropped_height_);
+ EXPECT_EQ(120, out_width_);
+ EXPECT_EQ(69, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
+ // Ask for 1920x1080 (16:9 aspect), with 1/16 scaling.
+ const int w = 1920;
+ const int h = 1080;
+ adapter_.OnOutputFormatRequest(VideoFormat(w, h, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ w * h * 1 / 16 * 1 / 16,
+ std::numeric_limits<int>::max());
+
+ // Send 1920x1080 (16:9 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ w, h, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_));
+
+  // Instead of getting the exact aspect ratio with cropped resolution 1920x1080,
+ // the resolution should be adjusted to get a perfect scale factor instead.
+ EXPECT_EQ(1920, cropped_width_);
+ EXPECT_EQ(1072, cropped_height_);
+ EXPECT_EQ(120, out_width_);
+ EXPECT_EQ(67, out_height_);
+
+ // Adapt back up one step to 3/32.
+ adapter_.OnResolutionFramerateRequest(w * h * 3 / 32 * 3 / 32,
+ w * h * 1 / 8 * 1 / 8,
+ std::numeric_limits<int>::max());
+
+ // Send 1920x1080 (16:9 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ w, h, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_));
+
+ EXPECT_EQ(180, out_width_);
+ EXPECT_EQ(99, out_height_);
+}
+
+TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
+ VideoFormat output_format = capture_format_;
+ output_format.width = 0;
+ output_format.height = 0;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ adapter_.OnResolutionFramerateRequest(960 * 540,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+  // Still expect all frames to be dropped.
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 480 - 1,
+ std::numeric_limits<int>::max());
+
+  // Still expect all frames to be dropped.
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+// Test that we will adapt to max given a target pixel count close to max.
+TEST_F(VideoAdapterTest, TestAdaptToMax) {
+ adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(640 * 360 - 1 /* target */,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0, &cropped_width_,
+ &cropped_height_, &out_width_,
+ &out_height_));
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc
new file mode 100644
index 0000000000..d2a9c54116
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videobroadcaster.h"
+
+#include <limits>
+
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace rtc {
+
+VideoBroadcaster::VideoBroadcaster() {
+ thread_checker_.DetachFromThread();
+}
+
+void VideoBroadcaster::AddOrUpdateSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ VideoSourceBase::AddOrUpdateSink(sink, wants);
+ UpdateWants();
+}
+
+void VideoBroadcaster::RemoveSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ VideoSourceBase::RemoveSink(sink);
+ UpdateWants();
+}
+
+bool VideoBroadcaster::frame_wanted() const {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ return !sink_pairs().empty();
+}
+
+VideoSinkWants VideoBroadcaster::wants() const {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ return current_wants_;
+}
+
+void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ for (auto& sink_pair : sink_pairs()) {
+ if (sink_pair.wants.rotation_applied &&
+ frame.rotation() != webrtc::kVideoRotation_0) {
+ // Calls to OnFrame are not synchronized with changes to the sink wants.
+ // When rotation_applied is set to true, one or a few frames may get here
+ // with rotation still pending. Protect sinks that don't expect any
+ // pending rotation.
+ RTC_LOG(LS_VERBOSE) << "Discarding frame with unexpected rotation.";
+ continue;
+ }
+ if (sink_pair.wants.black_frames) {
+ sink_pair.sink->OnFrame(webrtc::VideoFrame(
+ GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
+ frame.timestamp_us()));
+ } else {
+ sink_pair.sink->OnFrame(frame);
+ }
+ }
+}
+
+void VideoBroadcaster::OnDiscardedFrame() {
+ for (auto& sink_pair : sink_pairs()) {
+ sink_pair.sink->OnDiscardedFrame();
+ }
+}
+
+void VideoBroadcaster::UpdateWants() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ VideoSinkWants wants;
+ wants.rotation_applied = false;
+ for (auto& sink : sink_pairs()) {
+ // wants.rotation_applied == ANY(sink.wants.rotation_applied)
+ if (sink.wants.rotation_applied) {
+ wants.rotation_applied = true;
+ }
+ // wants.max_pixel_count == MIN(sink.wants.max_pixel_count)
+ if (sink.wants.max_pixel_count < wants.max_pixel_count) {
+ wants.max_pixel_count = sink.wants.max_pixel_count;
+ }
+ // Select the minimum requested target_pixel_count, if any, of all sinks so
+ // that we don't over utilize the resources for any one.
+ // TODO(sprang): Consider using the median instead, since the limit can be
+ // expressed by max_pixel_count.
+ if (sink.wants.target_pixel_count &&
+ (!wants.target_pixel_count ||
+ (*sink.wants.target_pixel_count < *wants.target_pixel_count))) {
+ wants.target_pixel_count = sink.wants.target_pixel_count;
+ }
+ // Select the minimum for the requested max framerates.
+ if (sink.wants.max_framerate_fps < wants.max_framerate_fps) {
+ wants.max_framerate_fps = sink.wants.max_framerate_fps;
+ }
+ }
+
+ if (wants.target_pixel_count &&
+ *wants.target_pixel_count >= wants.max_pixel_count) {
+ wants.target_pixel_count.emplace(wants.max_pixel_count);
+ }
+ current_wants_ = wants;
+}
+
+const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
+VideoBroadcaster::GetBlackFrameBuffer(int width, int height) {
+ if (!black_frame_buffer_ || black_frame_buffer_->width() != width ||
+ black_frame_buffer_->height() != height) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ webrtc::I420Buffer::Create(width, height);
+ webrtc::I420Buffer::SetBlack(buffer.get());
+ black_frame_buffer_ = buffer;
+ }
+
+ return black_frame_buffer_;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h
new file mode 100644
index 0000000000..a8e21fa5b8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOBROADCASTER_H_
+#define MEDIA_BASE_VIDEOBROADCASTER_H_
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "media/base/videosinkinterface.h"
+#include "media/base/videosourcebase.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/thread_checker.h"
+
+namespace rtc {
+
+// VideoBroadcaster broadcasts video frames to sinks and combines
+// VideoSinkWants from its sinks. It does that by implementing
+// rtc::VideoSourceInterface and rtc::VideoSinkInterface.
+// Sinks must be added and removed on one and only one thread.
+// Video frames can be broadcast on any thread, i.e. VideoBroadcaster::OnFrame
+// can be called on any thread.
+class VideoBroadcaster : public VideoSourceBase,
+ public VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ VideoBroadcaster();
+ void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) override;
+ void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ // Returns true if the next frame will be delivered to at least one sink.
+ bool frame_wanted() const;
+
+ // Returns VideoSinkWants a source is requested to fulfill. They are
+ // aggregated by all VideoSinkWants from all sinks.
+ VideoSinkWants wants() const;
+
+ // This method ensures that if a sink sets rotation_applied == true,
+ // it will never receive a frame with pending rotation. Our caller
+ // may pass in frames without precise synchronization with changes
+ // to the VideoSinkWants.
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ void OnDiscardedFrame() override;
+
+ protected:
+ void UpdateWants() RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& GetBlackFrameBuffer(
+ int width,
+ int height) RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
+
+ ThreadChecker thread_checker_;
+ rtc::CriticalSection sinks_and_wants_lock_;
+
+ VideoSinkWants current_wants_ RTC_GUARDED_BY(sinks_and_wants_lock_);
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> black_frame_buffer_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOBROADCASTER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc
new file mode 100644
index 0000000000..0f2057ebe0
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+
+#include "media/base/videobroadcaster.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "media/base/fakevideorenderer.h"
+#include "rtc_base/gunit.h"
+
+using rtc::VideoBroadcaster;
+using rtc::VideoSinkWants;
+using cricket::FakeVideoRenderer;
+
+
+TEST(VideoBroadcasterTest, frame_wanted) {
+ VideoBroadcaster broadcaster;
+ EXPECT_FALSE(broadcaster.frame_wanted());
+
+ FakeVideoRenderer sink;
+ broadcaster.AddOrUpdateSink(&sink, rtc::VideoSinkWants());
+ EXPECT_TRUE(broadcaster.frame_wanted());
+
+ broadcaster.RemoveSink(&sink);
+ EXPECT_FALSE(broadcaster.frame_wanted());
+}
+
+TEST(VideoBroadcasterTest, OnFrame) {
+ VideoBroadcaster broadcaster;
+
+ FakeVideoRenderer sink1;
+ FakeVideoRenderer sink2;
+ broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
+ broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());
+ static int kWidth = 100;
+ static int kHeight = 50;
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(kWidth, kHeight));
+  // Initialize, to avoid warnings on use of uninitialized values.
+ webrtc::I420Buffer::SetBlack(buffer);
+
+ webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
+
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(1, sink1.num_rendered_frames());
+ EXPECT_EQ(1, sink2.num_rendered_frames());
+
+ broadcaster.RemoveSink(&sink1);
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(1, sink1.num_rendered_frames());
+ EXPECT_EQ(2, sink2.num_rendered_frames());
+
+ broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(2, sink1.num_rendered_frames());
+ EXPECT_EQ(3, sink2.num_rendered_frames());
+}
+
+TEST(VideoBroadcasterTest, AppliesRotationIfAnySinkWantsRotationApplied) {
+ VideoBroadcaster broadcaster;
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.rotation_applied = false;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.rotation_applied = true;
+
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_TRUE(broadcaster.wants().rotation_applied);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
+ VideoBroadcaster broadcaster;
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ broadcaster.wants().max_pixel_count);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.max_pixel_count = 1280 * 720;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.max_pixel_count = 640 * 360;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(640 * 360, broadcaster.wants().max_pixel_count);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxAndTargetPixelCount) {
+ VideoBroadcaster broadcaster;
+ EXPECT_TRUE(!broadcaster.wants().target_pixel_count);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.target_pixel_count = 1280 * 720;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.target_pixel_count = 640 * 360;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(640 * 360, *broadcaster.wants().target_pixel_count);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxFramerate) {
+ VideoBroadcaster broadcaster;
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ broadcaster.wants().max_framerate_fps);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.max_framerate_fps = 30;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.max_framerate_fps = 15;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(15, broadcaster.wants().max_framerate_fps);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
+}
+
+TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
+ VideoBroadcaster broadcaster;
+ EXPECT_TRUE(!broadcaster.wants().black_frames);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.black_frames = true;
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.black_frames = false;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(100, 200));
+ // Makes it not all black.
+ buffer->InitializeData();
+
+ webrtc::VideoFrame frame1(buffer, webrtc::kVideoRotation_0,
+ 10 /* timestamp_us */);
+ broadcaster.OnFrame(frame1);
+ EXPECT_TRUE(sink1.black_frame());
+ EXPECT_EQ(10, sink1.timestamp_us());
+ EXPECT_FALSE(sink2.black_frame());
+ EXPECT_EQ(10, sink2.timestamp_us());
+
+ // Switch the sink wants.
+ wants1.black_frames = false;
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ wants2.black_frames = true;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+
+ webrtc::VideoFrame frame2(buffer, webrtc::kVideoRotation_0,
+ 30 /* timestamp_us */);
+ broadcaster.OnFrame(frame2);
+ EXPECT_FALSE(sink1.black_frame());
+ EXPECT_EQ(30, sink1.timestamp_us());
+ EXPECT_TRUE(sink2.black_frame());
+ EXPECT_EQ(30, sink2.timestamp_us());
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer.cc b/third_party/libwebrtc/webrtc/media/base/videocapturer.cc
new file mode 100644
index 0000000000..f6e05ba5d5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer.cc
@@ -0,0 +1,381 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation file of class VideoCapturer.
+
+#include "media/base/videocapturer.h"
+
+#include <algorithm>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace cricket {
+
+namespace {
+
+static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
+#ifdef WEBRTC_LINUX
+static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
+#endif
+static const char* kSimulcastScreenshareFieldTrialName =
+ "WebRTC-SimulcastScreenshare";
+
+} // namespace
+
+/////////////////////////////////////////////////////////////////////
+// Implementation of class VideoCapturer
+/////////////////////////////////////////////////////////////////////
+VideoCapturer::VideoCapturer() : apply_rotation_(false) {
+ thread_checker_.DetachFromThread();
+ Construct();
+}
+
+void VideoCapturer::Construct() {
+ enable_camera_list_ = false;
+ capture_state_ = CS_STOPPED;
+ scaled_width_ = 0;
+ scaled_height_ = 0;
+ enable_video_adapter_ = true;
+}
+
+const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
+ return &filtered_supported_formats_;
+}
+
+bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ CaptureState result = Start(capture_format);
+ const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
+ if (!success) {
+ return false;
+ }
+ if (result == CS_RUNNING) {
+ SetCaptureState(result);
+ }
+ return true;
+}
+
+void VideoCapturer::SetSupportedFormats(
+ const std::vector<VideoFormat>& formats) {
+ // This method is OK to call during initialization on a separate thread.
+ RTC_DCHECK(capture_state_ == CS_STOPPED ||
+ thread_checker_.CalledOnValidThread());
+ supported_formats_ = formats;
+ UpdateFilteredSupportedFormats();
+}
+
+bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
+ VideoFormat* best_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ // TODO(fbarchard): Directly support max_format.
+ UpdateFilteredSupportedFormats();
+ const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
+
+ if (supported_formats->empty()) {
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Capture Requested " << format.ToString();
+ int64_t best_distance = kMaxDistance;
+ std::vector<VideoFormat>::const_iterator best = supported_formats->end();
+ std::vector<VideoFormat>::const_iterator i;
+ for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
+ int64_t distance = GetFormatDistance(format, *i);
+ // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
+ // relatively bug free.
+ RTC_LOG(LS_INFO) << " Supported " << i->ToString() << " distance "
+ << distance;
+ if (distance < best_distance) {
+ best_distance = distance;
+ best = i;
+ }
+ }
+ if (supported_formats->end() == best) {
+ RTC_LOG(LS_ERROR) << " No acceptable camera format found";
+ return false;
+ }
+
+ if (best_format) {
+ best_format->width = best->width;
+ best_format->height = best->height;
+ best_format->fourcc = best->fourcc;
+ best_format->interval = best->interval;
+ RTC_LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
+ << best_format->interval << " distance " << best_distance;
+ }
+ return true;
+}
+
+void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ max_format_.reset(new VideoFormat(max_format));
+ RTC_LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
+ UpdateFilteredSupportedFormats();
+}
+
+bool VideoCapturer::GetInputSize(int* width, int* height) {
+ rtc::CritScope cs(&frame_stats_crit_);
+ if (!input_size_valid_) {
+ return false;
+ }
+ *width = input_width_;
+ *height = input_height_;
+
+ return true;
+}
+
+void VideoCapturer::RemoveSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ broadcaster_.RemoveSink(sink);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void VideoCapturer::AddOrUpdateSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ apply_rotation_ = wants.rotation_applied;
+
+ if (video_adapter()) {
+ video_adapter()->OnResolutionFramerateRequest(wants.target_pixel_count,
+ wants.max_pixel_count,
+ wants.max_framerate_fps);
+ }
+}
+
+bool VideoCapturer::AdaptFrame(int width,
+ int height,
+ int64_t camera_time_us,
+ int64_t system_time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y,
+ int64_t* translated_camera_time_us) {
+ if (translated_camera_time_us) {
+ *translated_camera_time_us =
+ timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
+ }
+ if (!broadcaster_.frame_wanted()) {
+ return false;
+ }
+
+ bool simulcast_screenshare_enabled =
+ webrtc::field_trial::IsEnabled(kSimulcastScreenshareFieldTrialName);
+ if (enable_video_adapter_ &&
+ (!IsScreencast() || simulcast_screenshare_enabled)) {
+ if (!video_adapter_.AdaptFrameResolution(
+ width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
+ crop_width, crop_height, out_width, out_height)) {
+ // VideoAdapter dropped the frame.
+ broadcaster_.OnDiscardedFrame();
+ return false;
+ }
+ *crop_x = (width - *crop_width) / 2;
+ *crop_y = (height - *crop_height) / 2;
+ } else {
+ *out_width = width;
+ *out_height = height;
+ *crop_width = width;
+ *crop_height = height;
+ *crop_x = 0;
+ *crop_y = 0;
+ }
+
+ return true;
+}
+
+void VideoCapturer::OnFrame(const webrtc::VideoFrame& frame,
+ int orig_width,
+ int orig_height) {
+ // For a child class which implements rotation itself, we should
+ // always have apply_rotation_ == false or frame.rotation() == 0.
+ // Except possibly during races where apply_rotation_ is changed
+ // mid-stream.
+ if (apply_rotation_ && frame.rotation() != webrtc::kVideoRotation_0) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ frame.video_frame_buffer());
+ if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420) {
+ // Sources producing non-I420 frames must handle apply_rotation
+ // themselves. But even if they do, we may occasionally end up
+ // in this case, for frames in flight at the time
+ // applied_rotation is set to true. In that case, we just drop
+ // the frame.
+ RTC_LOG(LS_WARNING) << "Non-I420 frame requiring rotation. Discarding.";
+ return;
+ }
+ broadcaster_.OnFrame(webrtc::VideoFrame(
+ webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
+ webrtc::kVideoRotation_0, frame.timestamp_us()));
+ } else {
+ broadcaster_.OnFrame(frame);
+ }
+ UpdateInputSize(orig_width, orig_height);
+}
+
+void VideoCapturer::SetCaptureState(CaptureState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (state == capture_state_) {
+ // Don't trigger a state changed callback if the state hasn't changed.
+ return;
+ }
+ capture_state_ = state;
+ SignalStateChange(this, capture_state_);
+}
+
+// Get the distance between the supported and desired formats.
+// Prioritization is done according to this algorithm:
+// 1) Width closeness. If not same, we prefer wider.
+// 2) Height closeness. If not same, we prefer higher.
+// 3) Framerate closeness. If not same, we prefer faster.
+// 4) Compression. If desired format has a specific fourcc, we need exact match;
+// otherwise, we use preference.
+int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
+ const VideoFormat& supported) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int64_t distance = kMaxDistance;
+
+ // Check fourcc.
+ uint32_t supported_fourcc = CanonicalFourCC(supported.fourcc);
+ int64_t delta_fourcc = kMaxDistance;
+ if (FOURCC_ANY == desired.fourcc) {
+ // Any fourcc is OK for the desired. Use preference to find best fourcc.
+ std::vector<uint32_t> preferred_fourccs;
+ if (!GetPreferredFourccs(&preferred_fourccs)) {
+ return distance;
+ }
+
+ for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
+ if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
+ delta_fourcc = i;
+#ifdef WEBRTC_LINUX
+ // For HD avoid YU12 which is a software conversion and has 2 bugs
+ // b/7326348 b/6960899. Reenable when fixed.
+ if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
+ supported_fourcc == FOURCC_YV12)) {
+ delta_fourcc += kYU12Penalty;
+ }
+#endif
+ break;
+ }
+ }
+ } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
+ delta_fourcc = 0; // Need exact match.
+ }
+
+ if (kMaxDistance == delta_fourcc) {
+ // Failed to match fourcc.
+ return distance;
+ }
+
+ // Check resolution and fps.
+ int desired_width = desired.width;
+ int desired_height = desired.height;
+ int64_t delta_w = supported.width - desired_width;
+ float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
+ float delta_fps =
+ supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
+  // Check the supported height compared to the height we would like it to be.
+ int64_t aspect_h = desired_width
+ ? supported.width * desired_height / desired_width
+ : desired_height;
+ int64_t delta_h = supported.height - aspect_h;
+
+ distance = 0;
+ // Set high penalty if the supported format is lower than the desired format.
+  // 3x means we would prefer down to 3/4, rather than up to double.
+ // But we'd prefer up to double than down to 1/2. This is conservative,
+ // strongly avoiding going down in resolution, similar to
+ // the old method, but not completely ruling it out in extreme situations.
+ // It also ignores framerate, which is often very low at high resolutions.
+ // TODO(fbarchard): Improve logic to use weighted factors.
+ static const int kDownPenalty = -3;
+ if (delta_w < 0) {
+ delta_w = delta_w * kDownPenalty;
+ }
+ if (delta_h < 0) {
+ delta_h = delta_h * kDownPenalty;
+ }
+ // Require camera fps to be at least 80% of what is requested if resolution
+ // matches.
+ // Require camera fps to be at least 96% of what is requested, or higher,
+ // if resolution differs. 96% allows for slight variations in fps. e.g. 29.97
+ if (delta_fps < 0) {
+ float min_desirable_fps = delta_w ?
+ VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
+ VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
+ delta_fps = -delta_fps;
+ if (supported_fps < min_desirable_fps) {
+ distance |= static_cast<int64_t>(1) << 62;
+ } else {
+ distance |= static_cast<int64_t>(1) << 15;
+ }
+ }
+ int64_t idelta_fps = static_cast<int>(delta_fps);
+
+ // 12 bits for width and height and 8 bits for fps and fourcc.
+ distance |=
+ (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;
+
+ return distance;
+}
+
+void VideoCapturer::UpdateFilteredSupportedFormats() {
+ filtered_supported_formats_.clear();
+ filtered_supported_formats_ = supported_formats_;
+ if (!max_format_) {
+ return;
+ }
+ std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
+ while (iter != filtered_supported_formats_.end()) {
+ if (ShouldFilterFormat(*iter)) {
+ iter = filtered_supported_formats_.erase(iter);
+ } else {
+ ++iter;
+ }
+ }
+ if (filtered_supported_formats_.empty()) {
+    // The device only captures at resolutions higher than |max_format_|. This
+    // indicates that |max_format_| should be ignored, as it is better to
+    // capture at too high a resolution than to not capture at all.
+ filtered_supported_formats_ = supported_formats_;
+ }
+}
+
+bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (!enable_camera_list_) {
+ return false;
+ }
+ return format.width > max_format_->width ||
+ format.height > max_format_->height;
+}
+
+void VideoCapturer::UpdateInputSize(int width, int height) {
+ // Update stats protected from fetches from different thread.
+ rtc::CritScope cs(&frame_stats_crit_);
+
+ input_size_valid_ = true;
+ input_width_ = width;
+ input_height_ = height;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer.h b/third_party/libwebrtc/webrtc/media/base/videocapturer.h
new file mode 100644
index 0000000000..684f8730bd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer.h
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Declaration of abstract class VideoCapturer
+
+#ifndef MEDIA_BASE_VIDEOCAPTURER_H_
+#define MEDIA_BASE_VIDEOCAPTURER_H_
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/videoadapter.h"
+#include "media/base/videobroadcaster.h"
+#include "media/base/videocommon.h"
+#include "media/base/videosourceinterface.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/thread_checker.h"
+#include "rtc_base/timestampaligner.h"
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace cricket {
+
+// Current state of the capturer.
+enum CaptureState {
+ CS_STOPPED, // The capturer has been stopped or hasn't started yet.
+ CS_STARTING, // The capturer is in the process of starting. Note, it may
+ // still fail to start.
+ CS_RUNNING, // The capturer has been started successfully and is now
+ // capturing.
+ CS_FAILED, // The capturer failed to start.
+};
+
+// VideoCapturer is an abstract class that defines the interfaces for video
+// capturing. The subclasses implement the video capturer for various types of
+// capturers and various platforms.
+//
+// The captured frames may need to be adapted (for example, cropping).
+// Video adaptation is built into and enabled by default. After a frame has
+// been captured from the device, it is sent to the video adapter, then out to
+// the sinks.
+//
+// Programming model:
+// Create an object of a subclass of VideoCapturer
+// Initialize
+// SignalStateChange.connect()
+// AddOrUpdateSink()
+// Find the capture format for Start() by either calling GetSupportedFormats()
+// and selecting one of the supported or calling GetBestCaptureFormat().
+// video_adapter()->OnOutputFormatRequest(desired_encoding_format)
+// Start()
+// GetCaptureFormat() optionally
+// Stop()
+//
+// Assumption:
+// The Start() and Stop() methods are called by a single thread (E.g., the
+//   media engine thread). Hence, the VideoCapturer subclasses don't need to be
+// thread safe.
+//
+class VideoCapturer : public sigslot::has_slots<>,
+ public rtc::VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+ VideoCapturer();
+
+ virtual ~VideoCapturer() {}
+
+ // Gets the id of the underlying device, which is available after the capturer
+ // is initialized. Can be used to determine if two capturers reference the
+ // same device.
+ const std::string& GetId() const { return id_; }
+
+ // Get the capture formats supported by the video capturer. The supported
+ // formats are non empty after the device has been opened successfully.
+ const std::vector<VideoFormat>* GetSupportedFormats() const;
+
+ // Get the best capture format for the desired format. The best format is the
+ // same as one of the supported formats except that the frame interval may be
+ // different. If the application asks for 16x9 and the camera does not support
+ // 16x9 HD or the application asks for 16x10, we find the closest 4x3 and then
+ // crop; Otherwise, we find what the application asks for. Note that we assume
+ // that for HD, the desired format is always 16x9. The subclasses can override
+ // the default implementation.
+ // Parameters
+ // desired: the input desired format. If desired.fourcc is not kAnyFourcc,
+ // the best capture format has the exactly same fourcc. Otherwise,
+ // the best capture format uses a fourcc in GetPreferredFourccs().
+ // best_format: the output of the best capture format.
+ // Return false if there is no such a best format, that is, the desired format
+ // is not supported.
+ virtual bool GetBestCaptureFormat(const VideoFormat& desired,
+ VideoFormat* best_format);
+
+ // TODO(hellner): deprecate (make private) the Start API in favor of this one.
+ // Also remove CS_STARTING as it is implied by the return
+ // value of StartCapturing().
+ bool StartCapturing(const VideoFormat& capture_format);
+ // Start the video capturer with the specified capture format.
+ // Parameter
+ // capture_format: The caller got this parameter by either calling
+ // GetSupportedFormats() and selecting one of the supported
+ // or calling GetBestCaptureFormat().
+ // Return
+ // CS_STARTING: The capturer is trying to start. Success or failure will
+ // be notified via the |SignalStateChange| callback.
+ // CS_RUNNING: if the capturer is started and capturing.
+  //   CS_FAILED:   if the capturer fails to start.
+ // CS_NO_DEVICE: if the capturer has no device and fails to start.
+ virtual CaptureState Start(const VideoFormat& capture_format) = 0;
+
+ // Get the current capture format, which is set by the Start() call.
+ // Note that the width and height of the captured frames may differ from the
+ // capture format. For example, the capture format is HD but the captured
+ // frames may be smaller than HD.
+ const VideoFormat* GetCaptureFormat() const {
+ return capture_format_.get();
+ }
+
+ // Stop the video capturer.
+ virtual void Stop() = 0;
+ // Check if the video capturer is running.
+ virtual bool IsRunning() = 0;
+ CaptureState capture_state() const {
+ return capture_state_;
+ }
+
+ virtual bool apply_rotation() { return apply_rotation_; }
+
+ // Returns true if the capturer is screencasting. This can be used to
+ // implement screencast specific behavior.
+ virtual bool IsScreencast() const = 0;
+
+ // Caps the VideoCapturer's format according to max_format. It can e.g. be
+ // used to prevent cameras from capturing at a resolution or framerate that
+ // the capturer is capable of but not performing satisfactorily at.
+ // The capping is an upper bound for each component of the capturing format.
+ // The fourcc component is ignored.
+ void ConstrainSupportedFormats(const VideoFormat& max_format);
+
+ void set_enable_camera_list(bool enable_camera_list) {
+ enable_camera_list_ = enable_camera_list;
+ }
+ bool enable_camera_list() {
+ return enable_camera_list_;
+ }
+
+ // Signal all capture state changes that are not a direct result of calling
+ // Start().
+ sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange;
+
+ // If true, run video adaptation. By default, video adaptation is enabled
+ // and users must call video_adapter()->OnOutputFormatRequest()
+ // to receive frames.
+ bool enable_video_adapter() const { return enable_video_adapter_; }
+ void set_enable_video_adapter(bool enable_video_adapter) {
+ enable_video_adapter_ = enable_video_adapter;
+ }
+
+ bool GetInputSize(int* width, int* height);
+
+ // Implements VideoSourceInterface
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override;
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ protected:
+ // OnSinkWantsChanged can be overridden to change the default behavior
+ // when a sink changes its VideoSinkWants by calling AddOrUpdateSink.
+ virtual void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+
+ // Reports the appropriate frame size after adaptation. Returns true
+ // if a frame is wanted. Returns false if there are no interested
+ // sinks, or if the VideoAdapter decides to drop the frame.
+
+ // This function also implements timestamp translation/filtering.
+  // |camera_time_us| is the camera's timestamp for the captured
+  // frame; it is expected to have good accuracy, but it may use an
+  // arbitrary epoch and a possibly free-running clock with a frequency
+  // slightly different from the system clock. |system_time_us| is the
+ // monotonic system time (in the same scale as rtc::TimeMicros) when
+ // the frame was captured; the application is expected to read the
+ // system time as soon as possible after frame capture, but it may
+ // suffer scheduling jitter or poor system clock resolution. The
+ // output |translated_camera_time_us| is a combined timestamp,
+ // taking advantage of the supposedly higher accuracy in the camera
+ // timestamp, but using the same epoch and frequency as system time.
+ bool AdaptFrame(int width,
+ int height,
+ int64_t camera_time_us,
+ int64_t system_time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y,
+ int64_t* translated_camera_time_us);
+
+ // Called when a frame has been captured and converted to a
+ // VideoFrame. OnFrame can be called directly by an implementation
+ // that does not use SignalFrameCaptured or OnFrameCaptured. The
+ // orig_width and orig_height are used only to produce stats.
+ void OnFrame(const webrtc::VideoFrame& frame,
+ int orig_width,
+ int orig_height);
+
+ VideoAdapter* video_adapter() { return &video_adapter_; }
+
+ void SetCaptureState(CaptureState state);
+
+  // Subclasses override this virtual method to provide a vector of fourccs, in
+ // order of preference, that are expected by the media engine.
+ virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) = 0;
+
+ // mutators to set private attributes
+ void SetId(const std::string& id) {
+ id_ = id;
+ }
+
+ void SetCaptureFormat(const VideoFormat* format) {
+ capture_format_.reset(format ? new VideoFormat(*format) : NULL);
+ }
+
+ void SetSupportedFormats(const std::vector<VideoFormat>& formats);
+
+ private:
+ void Construct();
+ // Get the distance between the desired format and the supported format.
+ // Return the max distance if they mismatch. See the implementation for
+ // details.
+ int64_t GetFormatDistance(const VideoFormat& desired,
+ const VideoFormat& supported);
+
+ // Updates filtered_supported_formats_ so that it contains the formats in
+ // supported_formats_ that fulfill all applied restrictions.
+ void UpdateFilteredSupportedFormats();
+ // Returns true if format doesn't fulfill all applied restrictions.
+ bool ShouldFilterFormat(const VideoFormat& format) const;
+
+ void UpdateInputSize(int width, int height);
+
+ rtc::ThreadChecker thread_checker_;
+ std::string id_;
+ CaptureState capture_state_;
+ std::unique_ptr<VideoFormat> capture_format_;
+ std::vector<VideoFormat> supported_formats_;
+ std::unique_ptr<VideoFormat> max_format_;
+ std::vector<VideoFormat> filtered_supported_formats_;
+
+ bool enable_camera_list_;
+ int scaled_width_; // Current output size from ComputeScale.
+ int scaled_height_;
+
+ rtc::VideoBroadcaster broadcaster_;
+ bool enable_video_adapter_;
+ VideoAdapter video_adapter_;
+
+ rtc::CriticalSection frame_stats_crit_;
+  // The captured frame size before potential adaptation.
+ bool input_size_valid_ RTC_GUARDED_BY(frame_stats_crit_) = false;
+ int input_width_ RTC_GUARDED_BY(frame_stats_crit_);
+ int input_height_ RTC_GUARDED_BY(frame_stats_crit_);
+
+ // Whether capturer should apply rotation to the frame before
+ // passing it on to the registered sinks.
+ bool apply_rotation_;
+
+ // State for the timestamp translation.
+ rtc::TimestampAligner timestamp_aligner_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(VideoCapturer);
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCAPTURER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc
new file mode 100644
index 0000000000..7450694d21
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc
@@ -0,0 +1,786 @@
+/*
+ * Copyright (c) 2008 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <memory>
+#include <vector>
+
+#include "media/base/fakevideocapturer.h"
+#include "media/base/fakevideorenderer.h"
+#include "media/base/testutils.h"
+#include "media/base/videocapturer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+
+using cricket::FakeVideoCapturer;
+
+namespace {
+
+const int kMsCallbackWait = 500;
+// For HD only the height matters.
+const int kMinHdHeight = 720;
+
+} // namespace
+
+class VideoCapturerTest
+ : public sigslot::has_slots<>,
+ public testing::Test {
+ public:
+ VideoCapturerTest()
+ : capture_state_(cricket::CS_STOPPED), num_state_changes_(0) {
+ InitCapturer(false);
+ }
+
+ protected:
+ void InitCapturer(bool is_screencast) {
+ capturer_ = std::unique_ptr<FakeVideoCapturer>(
+ new FakeVideoCapturer(is_screencast));
+ capturer_->SignalStateChange.connect(this,
+ &VideoCapturerTest::OnStateChange);
+ capturer_->AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
+ }
+ void InitScreencast() { InitCapturer(true); }
+
+ void OnStateChange(cricket::VideoCapturer*,
+ cricket::CaptureState capture_state) {
+ capture_state_ = capture_state;
+ ++num_state_changes_;
+ }
+ cricket::CaptureState capture_state() { return capture_state_; }
+ int num_state_changes() { return num_state_changes_; }
+
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer_;
+ cricket::CaptureState capture_state_;
+ int num_state_changes_;
+ cricket::FakeVideoRenderer renderer_;
+ bool expects_rotation_applied_;
+};
+
+TEST_F(VideoCapturerTest, CaptureState) {
+ EXPECT_TRUE(capturer_->enable_video_adapter());
+ EXPECT_EQ(cricket::CS_RUNNING, capturer_->Start(cricket::VideoFormat(
+ 640,
+ 480,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+ EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
+ EXPECT_EQ(1, num_state_changes());
+ capturer_->Stop();
+ EXPECT_EQ_WAIT(cricket::CS_STOPPED, capture_state(), kMsCallbackWait);
+ EXPECT_EQ(2, num_state_changes());
+ capturer_->Stop();
+ rtc::Thread::Current()->ProcessMessages(100);
+ EXPECT_EQ(2, num_state_changes());
+}
+
+TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
+ InitScreencast();
+
+ int kWidth = 1281;
+ int kHeight = 720;
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420));
+ capturer_->ResetSupportedFormats(formats);
+
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer_->Start(cricket::VideoFormat(
+ kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+ EXPECT_EQ(kWidth, renderer_.width());
+ EXPECT_EQ(kHeight, renderer_.height());
+}
+
+TEST_F(VideoCapturerTest, TestRotationAppliedBySource) {
+ int kWidth = 800;
+ int kHeight = 400;
+ int frame_count = 0;
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420));
+
+ capturer_->ResetSupportedFormats(formats);
+ rtc::VideoSinkWants wants;
+ // |capturer_| should compensate rotation.
+ wants.rotation_applied = true;
+ capturer_->AddOrUpdateSink(&renderer_, wants);
+
+ // capturer_ should compensate rotation as default.
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer_->Start(cricket::VideoFormat(
+ kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+ // If the frame's rotation is compensated anywhere in the pipeline based on
+ // the rotation information, the renderer should be given the right dimension
+ // such that the frame could be rendered.
+
+ capturer_->SetRotation(webrtc::kVideoRotation_90);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ // Swapped width and height
+ EXPECT_EQ(kWidth, renderer_.height());
+ EXPECT_EQ(kHeight, renderer_.width());
+ EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+
+ capturer_->SetRotation(webrtc::kVideoRotation_270);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ // Swapped width and height
+ EXPECT_EQ(kWidth, renderer_.height());
+ EXPECT_EQ(kHeight, renderer_.width());
+ EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+
+ capturer_->SetRotation(webrtc::kVideoRotation_180);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ // Back to normal width and height
+ EXPECT_EQ(kWidth, renderer_.width());
+ EXPECT_EQ(kHeight, renderer_.height());
+ EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+}
+
+TEST_F(VideoCapturerTest, TestRotationAppliedBySinkByDefault) {
+ int kWidth = 800;
+ int kHeight = 400;
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420));
+
+ capturer_->ResetSupportedFormats(formats);
+
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer_->Start(cricket::VideoFormat(
+ kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+ // If the frame's rotation is compensated anywhere in the pipeline, the frame
+ // won't have its original dimension out from capturer. Since the renderer
+ // here has the same dimension as the capturer, it will skip that frame as the
+ // resolution won't match anymore.
+
+ int frame_count = 0;
+ capturer_->SetRotation(webrtc::kVideoRotation_0);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+ capturer_->SetRotation(webrtc::kVideoRotation_90);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+ capturer_->SetRotation(webrtc::kVideoRotation_180);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+ capturer_->SetRotation(webrtc::kVideoRotation_270);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+}
+
+TEST_F(VideoCapturerTest, TestRotationAppliedBySourceWhenDifferentWants) {
+ int kWidth = 800;
+ int kHeight = 400;
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420));
+
+ capturer_->ResetSupportedFormats(formats);
+ rtc::VideoSinkWants wants;
+ // capturer_ should not compensate rotation.
+ wants.rotation_applied = false;
+ capturer_->AddOrUpdateSink(&renderer_, wants);
+
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer_->Start(cricket::VideoFormat(
+ kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+ int frame_count = 0;
+ capturer_->SetRotation(webrtc::kVideoRotation_90);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+ // Add another sink that wants frames to be rotated.
+ cricket::FakeVideoRenderer renderer2;
+ wants.rotation_applied = true;
+ capturer_->AddOrUpdateSink(&renderer2, wants);
+
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+ EXPECT_EQ(1, renderer2.num_rendered_frames());
+ EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+ EXPECT_EQ(webrtc::kVideoRotation_0, renderer2.rotation());
+}
+
+// TODO(nisse): This test doesn't quite fit here. It tests two things:
+// Aggregation of VideoSinkWants, which is the responsibility of
+// VideoBroadcaster, and translation of VideoSinkWants to actual
+// resolution, which is the responsibility of the VideoAdapter.
+TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer_->Start(cricket::VideoFormat(
+ 1280, 720, cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420)));
+ EXPECT_TRUE(capturer_->IsRunning());
+
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+ EXPECT_EQ(1280, renderer_.width());
+ EXPECT_EQ(720, renderer_.height());
+
+ // Request a lower resolution. The output resolution will have a resolution
+ // with less than or equal to |wants.max_pixel_count| depending on how the
+ // capturer can scale the input frame size.
+ rtc::VideoSinkWants wants;
+ wants.max_pixel_count = 1280 * 720 * 3 / 5;
+ capturer_->AddOrUpdateSink(&renderer_, wants);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(2, renderer_.num_rendered_frames());
+ EXPECT_EQ(960, renderer_.width());
+ EXPECT_EQ(540, renderer_.height());
+
+ // Request a lower resolution.
+ wants.max_pixel_count = (renderer_.width() * renderer_.height() * 3) / 5;
+ capturer_->AddOrUpdateSink(&renderer_, wants);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(3, renderer_.num_rendered_frames());
+ EXPECT_EQ(640, renderer_.width());
+ EXPECT_EQ(360, renderer_.height());
+
+ // Adding a new renderer should not affect resolution.
+ cricket::FakeVideoRenderer renderer2;
+ capturer_->AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(4, renderer_.num_rendered_frames());
+ EXPECT_EQ(640, renderer_.width());
+ EXPECT_EQ(360, renderer_.height());
+ EXPECT_EQ(1, renderer2.num_rendered_frames());
+ EXPECT_EQ(640, renderer2.width());
+ EXPECT_EQ(360, renderer2.height());
+
+ // Request higher resolution.
+ wants.target_pixel_count.emplace((wants.max_pixel_count * 5) / 3);
+ wants.max_pixel_count = wants.max_pixel_count * 4;
+ capturer_->AddOrUpdateSink(&renderer_, wants);
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(5, renderer_.num_rendered_frames());
+ EXPECT_EQ(960, renderer_.width());
+ EXPECT_EQ(540, renderer_.height());
+ EXPECT_EQ(2, renderer2.num_rendered_frames());
+ EXPECT_EQ(960, renderer2.width());
+ EXPECT_EQ(540, renderer2.height());
+
+ // Updating with no wants should not affect resolution.
+ capturer_->AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(6, renderer_.num_rendered_frames());
+ EXPECT_EQ(960, renderer_.width());
+ EXPECT_EQ(540, renderer_.height());
+ EXPECT_EQ(3, renderer2.num_rendered_frames());
+ EXPECT_EQ(960, renderer2.width());
+ EXPECT_EQ(540, renderer2.height());
+
+ // But resetting the wants should reset the resolution to what the camera is
+ // opened with.
+ capturer_->AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
+ EXPECT_TRUE(capturer_->CaptureFrame());
+ EXPECT_EQ(7, renderer_.num_rendered_frames());
+ EXPECT_EQ(1280, renderer_.width());
+ EXPECT_EQ(720, renderer_.height());
+ EXPECT_EQ(4, renderer2.num_rendered_frames());
+ EXPECT_EQ(1280, renderer2.width());
+ EXPECT_EQ(720, renderer2.height());
+}
+
+TEST_F(VideoCapturerTest, TestFourccMatch) {
+ cricket::VideoFormat desired(640, 480,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_ANY);
+ cricket::VideoFormat best;
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(640, best.width);
+ EXPECT_EQ(480, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+ desired.fourcc = cricket::FOURCC_MJPG;
+ EXPECT_FALSE(capturer_->GetBestCaptureFormat(desired, &best));
+
+ desired.fourcc = cricket::FOURCC_I420;
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+}
+
+TEST_F(VideoCapturerTest, TestResolutionMatch) {
+ cricket::VideoFormat desired(1920, 1080,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_ANY);
+ cricket::VideoFormat best;
+ // Ask for 1920x1080. Get HD 1280x720 which is the highest.
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(1280, best.width);
+ EXPECT_EQ(720, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+ desired.width = 360;
+ desired.height = 250;
+ // Ask for a little higher than QVGA. Get QVGA.
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(320, best.width);
+ EXPECT_EQ(240, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+ desired.width = 480;
+ desired.height = 270;
+ // Ask for HVGA. Get VGA.
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(640, best.width);
+ EXPECT_EQ(480, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+ desired.width = 320;
+ desired.height = 240;
+ // Ask for QVGA. Get QVGA.
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(320, best.width);
+ EXPECT_EQ(240, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+ desired.width = 80;
+ desired.height = 60;
+ // Ask for lower than QQVGA. Get QQVGA, which is the lowest.
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+ EXPECT_EQ(160, best.width);
+ EXPECT_EQ(120, best.height);
+ EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+}
+
+// Verifies format selection against a camera whose HD modes only run at low
+// frame rates: the matcher should trade resolution for frame rate and only
+// pick an HD mode when the requested fps is low enough to be satisfied.
+TEST_F(VideoCapturerTest, TestHDResolutionMatch) {
+  // Add some HD formats typical of a mediocre HD webcam.
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(960, 544,
+      cricket::VideoFormat::FpsToInterval(24), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(2592, 1944,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(formats);
+
+  cricket::VideoFormat desired(960, 720,
+                               cricket::VideoFormat::FpsToInterval(30),
+                               cricket::FOURCC_ANY);
+  cricket::VideoFormat best;
+  // Ask for 960x720 30 fps. Get qHD 24 fps
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(960, best.width);
+  EXPECT_EQ(544, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(24), best.interval);
+
+  desired.width = 960;
+  desired.height = 544;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for qHD 30 fps. Get qHD 24 fps
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(960, best.width);
+  EXPECT_EQ(544, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(24), best.interval);
+
+  desired.width = 360;
+  desired.height = 250;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for a little higher than QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 480;
+  desired.height = 270;
+  // Ask for HVGA. Get VGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 320;
+  desired.height = 240;
+  // Ask for QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 160;
+  desired.height = 120;
+  // Ask for lower than QVGA. Get QVGA, which is the lowest.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 1280;
+  desired.height = 720;
+  // Ask for HD. 720p fps is too low. Get VGA which has 30 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 1280;
+  desired.height = 720;
+  desired.interval = cricket::VideoFormat::FpsToInterval(15);
+  // Ask for HD 15 fps. Fps matches. Get HD
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(1280, best.width);
+  EXPECT_EQ(720, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+
+  desired.width = 1920;
+  desired.height = 1080;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for 1080p. Fps of HD formats is too low. Get VGA which can do 30 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+}
+
+// Some cameras support 320x240 and 320x640. Verify we choose 320x240.
+TEST_F(VideoCapturerTest, TestStrangeFormats) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 640,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Requests at or below 320x240 must all resolve to 320x240, never to the
+  // odd portrait-like 320x640 mode.
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(320, 200,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(320, 180,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+
+  // Same expectation when the supported formats are listed in the reverse
+  // order; selection must not depend on list order.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(320, 640,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+}
+
+// Some cameras only have very low fps. Verify we choose something sensible.
+TEST_F(VideoCapturerTest, TestPoorFpsFormats) {
+  // all formats are low framerate
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(2), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // When every mode is equally poor fps-wise, the requested resolution wins.
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // Increase framerate of 320x240. Expect low fps VGA avoided.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(2), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+}
+
+// Some cameras support same size with different frame rates. Verify we choose
+// the frame rate properly.
+TEST_F(VideoCapturerTest, TestSameSizeDifferentFpsFormats) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(20), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Each requested fps must be matched exactly, since the device offers it.
+  std::vector<cricket::VideoFormat> required_formats = supported_formats;
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+    EXPECT_EQ(required_formats[i].interval, best.interval);
+  }
+}
+
+// Some cameras support the correct resolution but at a lower fps than
+// we'd like. This tests we get the expected resolution and fps.
+TEST_F(VideoCapturerTest, TestFpsFormats) {
+  // We have VGA but low fps. Choose VGA, not HD
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // All requests are VGA-sized but with decreasing fps requirements.
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_ANY));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(20), cricket::FOURCC_ANY));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_ANY));
+  cricket::VideoFormat best;
+
+  // Expect 30 fps to choose 30 fps format.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[0], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(400, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  // Expect 20 fps to choose 30 fps format.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[1], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(400, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  // Expect 10 fps to choose 15 fps format and set fps to 15.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+
+  // We have VGA 60 fps and 15 fps. Choose best fps.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_MJPG));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 30 fps to choose 60 fps format and will set best fps to 60.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[0], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(60), best.interval);
+
+  // Expect 20 fps to choose 60 fps format, and will set best fps to 60.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[1], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(60), best.interval);
+
+  // Expect 10 fps to choose 15 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+}
+
+// Verifies aspect-ratio handling: 4x3 (640x480), 16x10 (640x400) and 16x9
+// (640x360) requests are honored exactly when supported, and 16x9 is only
+// preferred once the device actually exposes a 16x9 HD mode.
+TEST_F(VideoCapturerTest, TestRequest16x10_9) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  // We do not support HD, expect 4x3 for 4x3, 16x10, and 16x9 requests.
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats = supported_formats;
+  cricket::VideoFormat best;
+  // Expect 4x3, 16x10, and 16x9 requests are respected.
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // We do not support 16x9 HD, expect 4x3 for 4x3, 16x10, and 16x9 requests.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(960, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 4x3, 16x10, and 16x9 requests are respected.
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // We support 16x9HD, Expect 4x3, 16x10, and 16x9 requests are respected.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 4x3 for 4x3 and 16x10 requests.
+  for (size_t i = 0; i < required_formats.size() - 1; ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // Expect 16x9 for 16x9 request.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(360, best.height);
+}
+
+// Returns true if |formats| contains at least one HD format. Only the height
+// is consulted (>= kMinHdHeight); width is ignored.
+bool HdFormatInList(const std::vector<cricket::VideoFormat>& formats) {
+  for (std::vector<cricket::VideoFormat>::const_iterator found =
+       formats.begin(); found != formats.end(); ++found) {
+    if (found->height >= kMinHdHeight) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Verifies that ConstrainSupportedFormats() filters out HD formats only when
+// the camera whitelist is enabled, and is a no-op when it is disabled.
+TEST_F(VideoCapturerTest, Whitelist) {
+  // The definition of HD only applies to the height. Set the HD width to the
+  // smallest legal number to document this fact in this test.
+  const int kMinHdWidth = 1;
+  cricket::VideoFormat hd_format(kMinHdWidth,
+                                 kMinHdHeight,
+                                 cricket::VideoFormat::FpsToInterval(30),
+                                 cricket::FOURCC_I420);
+  cricket::VideoFormat vga_format(640, 480,
+                                  cricket::VideoFormat::FpsToInterval(30),
+                                  cricket::FOURCC_I420);
+  std::vector<cricket::VideoFormat> formats = *capturer_->GetSupportedFormats();
+  formats.push_back(hd_format);
+
+  // Enable whitelist. Expect HD not in list.
+  capturer_->set_enable_camera_list(true);
+  capturer_->ResetSupportedFormats(formats);
+  // HD is still present until the list is constrained below.
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_FALSE(HdFormatInList(*capturer_->GetSupportedFormats()));
+
+  // Disable whitelist. Expect HD in list.
+  capturer_->set_enable_camera_list(false);
+  capturer_->ResetSupportedFormats(formats);
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+}
+
+// Verifies the safety valve: if constraining would remove every supported
+// format, no filtering is applied at all, so an HD-only camera keeps working.
+TEST_F(VideoCapturerTest, BlacklistAllFormats) {
+  cricket::VideoFormat vga_format(640, 480,
+                                  cricket::VideoFormat::FpsToInterval(30),
+                                  cricket::FOURCC_I420);
+  std::vector<cricket::VideoFormat> supported_formats;
+  // Mock a device that only supports HD formats.
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1920, 1080,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+  EXPECT_EQ(2u, capturer_->GetSupportedFormats()->size());
+  // Now, enable the list, which would exclude both formats. However, since
+  // only HD formats are available, we refuse to filter at all, so we don't
+  // break this camera.
+  capturer_->set_enable_camera_list(true);
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_EQ(2u, capturer_->GetSupportedFormats()->size());
+  // To make sure it's not just the camera list being broken, add in VGA and
+  // try again. This time, only the VGA format should be there.
+  supported_formats.push_back(vga_format);
+  capturer_->ResetSupportedFormats(supported_formats);
+  ASSERT_EQ(1u, capturer_->GetSupportedFormats()->size());
+  EXPECT_EQ(vga_format.height, capturer_->GetSupportedFormats()->at(0).height);
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h b/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h
new file mode 100644
index 0000000000..219e95bb0d
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
+#define MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
+
+#include <memory>
+
+#include "media/base/device.h"
+
+namespace cricket {
+
+class VideoCapturer;
+
+// Abstract factory interface for creating a VideoCapturer bound to a capture
+// Device. Ownership of the created capturer is transferred to the caller.
+class VideoDeviceCapturerFactory {
+ public:
+  VideoDeviceCapturerFactory() {}
+  virtual ~VideoDeviceCapturerFactory() {}
+
+  // Creates a capturer for |device|. Implemented by platform-specific
+  // subclasses.
+  virtual std::unique_ptr<VideoCapturer> Create(const Device& device) = 0;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon.cc b/third_party/libwebrtc/webrtc/media/base/videocommon.cc
new file mode 100644
index 0000000000..e5168b55ca
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videocommon.h"
+
+#include <limits.h> // For INT_MAX
+#include <math.h>
+#include <sstream>
+
+#include "rtc_base/arraysize.h"
+
+namespace cricket {
+
+// One alias->canonical mapping used by CanonicalFourCC() below.
+struct FourCCAliasEntry {
+  uint32_t alias;
+  uint32_t canonical;
+};
+
+// Lookup table mapping vendor/OS-specific fourcc aliases to the canonical
+// code used internally. Scanned linearly; first match wins.
+static const FourCCAliasEntry kFourCCAliases[] = {
+  {FOURCC_IYUV, FOURCC_I420},
+  {FOURCC_YU16, FOURCC_I422},
+  {FOURCC_YU24, FOURCC_I444},
+  {FOURCC_YUYV, FOURCC_YUY2},
+  {FOURCC_YUVS, FOURCC_YUY2},
+  {FOURCC_HDYC, FOURCC_UYVY},
+  {FOURCC_2VUY, FOURCC_UYVY},
+  {FOURCC_JPEG, FOURCC_MJPG},  // Note: JPEG has DHT while MJPG does not.
+  {FOURCC_DMB1, FOURCC_MJPG},
+  {FOURCC_BA81, FOURCC_BGGR},
+  {FOURCC_RGB3, FOURCC_RAW},
+  {FOURCC_BGR3, FOURCC_24BG},
+  {FOURCC_CM32, FOURCC_BGRA},
+  {FOURCC_CM24, FOURCC_RAW},
+};
+
+// Maps |fourcc| to its canonical equivalent via kFourCCAliases; any code not
+// present in the alias table (including FOURCC_ANY) is returned unchanged.
+uint32_t CanonicalFourCC(uint32_t fourcc) {
+  for (uint32_t i = 0; i < arraysize(kFourCCAliases); ++i) {
+    if (kFourCCAliases[i].alias == fourcc) {
+      return kFourCCAliases[i].canonical;
+    }
+  }
+  // Not an alias, so return it as-is.
+  return fourcc;
+}
+
+// The C++ standard requires a namespace-scope definition of static const
+// integral types even when they are initialized in the declaration (see
+// [class.static.data]/4), but MSVC with /Ze is non-conforming and treats that
+// as a multiply defined symbol error. See Also:
+// http://msdn.microsoft.com/en-us/library/34h23df8.aspx
+// The definition is therefore skipped when MSVC language extensions are on.
+#ifndef _MSC_EXTENSIONS
+const int64_t VideoFormat::kMinimumInterval;  // Initialized in header.
+#endif
+
+// Renders the format as "FOURCC WIDTHxHEIGHTxFPS". If the fourcc contains any
+// non-printable byte (e.g. the numeric CM24/CM32 aliases), the name is
+// dropped and only "WIDTHxHEIGHTxFPS" is emitted.
+std::string VideoFormat::ToString() const {
+  std::string fourcc_name = GetFourccName(fourcc) + " ";
+  for (std::string::const_iterator i = fourcc_name.begin();
+      i < fourcc_name.end(); ++i) {
+    // Test character is printable; Avoid isprint() which asserts on negatives.
+    if (*i < 32 || *i >= 127) {
+      fourcc_name = "";
+      break;
+    }
+  }
+
+  std::ostringstream ss;
+  ss << fourcc_name << width << "x" << height << "x"
+     << IntervalToFpsFloat(interval);
+  return ss.str();
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon.h b/third_party/libwebrtc/webrtc/media/base/videocommon.h
new file mode 100644
index 0000000000..264482b7c8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon.h
@@ -0,0 +1,229 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Common definition for video, including fourcc and VideoFormat.
+
+#ifndef MEDIA_BASE_VIDEOCOMMON_H_
+#define MEDIA_BASE_VIDEOCOMMON_H_
+
+#include <stdint.h>
+
+#include <string>
+
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+
+// TODO(janahan): For now, a hard-coded ssrc is used as the video ssrc.
+// This is because when the video frame is passed to the mediaprocessor for
+// processing, it doesn't have the correct ssrc. Since currently only Tx
+// Video processing is supported, this is ok. When we switch over to trigger
+// from capturer, this should be fixed and this const removed.
+const uint32_t kDummyVideoSsrc = 0xFFFFFFFF;
+
+// Minimum interval is 10k fps.
+// NOTE(review): |fps| is expanded unparenthesized, so callers must pass a
+// simple expression (no operators with lower precedence than '/').
+#define FPS_TO_INTERVAL(fps) \
+    (fps ? rtc::kNumNanosecsPerSec / fps : \
+     rtc::kNumNanosecsPerSec / 10000)
+
+//////////////////////////////////////////////////////////////////////////////
+// Definition of FourCC codes
+//////////////////////////////////////////////////////////////////////////////
+// Convert four characters to a FourCC code.
+// Needs to be a macro otherwise the OS X compiler complains when the kFormat*
+// constants are used in a switch.
+#define FOURCC(a, b, c, d) \
+    ((static_cast<uint32_t>(a)) | (static_cast<uint32_t>(b) << 8) | \
+    (static_cast<uint32_t>(c) << 16) | (static_cast<uint32_t>(d) << 24))
+// Character |a| is packed into the least-significant byte.
+// Some pages discussing FourCC codes:
+//   http://www.fourcc.org/yuv.php
+//   http://v4l2spec.bytesex.org/spec/book1.htm
+//   http://developer.apple.com/quicktime/icefloe/dispatch020.html
+//   http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12
+//   http://people.xiph.org/~xiphmont/containers/nut/nut4cc.txt
+
+// FourCC codes grouped according to implementation efficiency.
+// Primary formats should convert in 1 efficient step.
+// Secondary formats are converted in 2 steps.
+// Auxiliary formats call primary converters.
+enum FourCC {
+  // 9 Primary YUV formats: 5 planar, 2 biplanar, 2 packed.
+  FOURCC_I420 = FOURCC('I', '4', '2', '0'),
+  FOURCC_I422 = FOURCC('I', '4', '2', '2'),
+  FOURCC_I444 = FOURCC('I', '4', '4', '4'),
+  FOURCC_I411 = FOURCC('I', '4', '1', '1'),
+  FOURCC_I400 = FOURCC('I', '4', '0', '0'),
+  FOURCC_NV21 = FOURCC('N', 'V', '2', '1'),
+  FOURCC_NV12 = FOURCC('N', 'V', '1', '2'),
+  FOURCC_YUY2 = FOURCC('Y', 'U', 'Y', '2'),
+  FOURCC_UYVY = FOURCC('U', 'Y', 'V', 'Y'),
+
+  // 2 Secondary YUV formats: row biplanar.
+  FOURCC_M420 = FOURCC('M', '4', '2', '0'),
+
+  // 9 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp.
+  FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'),
+  FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'),
+  FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'),
+  FOURCC_24BG = FOURCC('2', '4', 'B', 'G'),
+  FOURCC_RAW = FOURCC('r', 'a', 'w', ' '),
+  FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'),
+  FOURCC_RGBP = FOURCC('R', 'G', 'B', 'P'),  // bgr565.
+  FOURCC_RGBO = FOURCC('R', 'G', 'B', 'O'),  // abgr1555.
+  FOURCC_R444 = FOURCC('R', '4', '4', '4'),  // argb4444.
+
+  // 4 Secondary RGB formats: 4 Bayer Patterns.
+  FOURCC_RGGB = FOURCC('R', 'G', 'G', 'B'),
+  FOURCC_BGGR = FOURCC('B', 'G', 'G', 'R'),
+  FOURCC_GRBG = FOURCC('G', 'R', 'B', 'G'),
+  FOURCC_GBRG = FOURCC('G', 'B', 'R', 'G'),
+
+  // 1 Primary Compressed YUV format.
+  FOURCC_MJPG = FOURCC('M', 'J', 'P', 'G'),
+
+  // 5 Auxiliary YUV variations: 3 with U and V planes are swapped, 1 Alias.
+  FOURCC_YV12 = FOURCC('Y', 'V', '1', '2'),
+  FOURCC_YV16 = FOURCC('Y', 'V', '1', '6'),
+  FOURCC_YV24 = FOURCC('Y', 'V', '2', '4'),
+  FOURCC_YU12 = FOURCC('Y', 'U', '1', '2'),  // Linux version of I420.
+  FOURCC_J420 = FOURCC('J', '4', '2', '0'),
+  FOURCC_J400 = FOURCC('J', '4', '0', '0'),
+
+  // 14 Auxiliary aliases. CanonicalFourCC() maps these to canonical fourcc.
+  FOURCC_IYUV = FOURCC('I', 'Y', 'U', 'V'),  // Alias for I420.
+  FOURCC_YU16 = FOURCC('Y', 'U', '1', '6'),  // Alias for I422.
+  FOURCC_YU24 = FOURCC('Y', 'U', '2', '4'),  // Alias for I444.
+  FOURCC_YUYV = FOURCC('Y', 'U', 'Y', 'V'),  // Alias for YUY2.
+  FOURCC_YUVS = FOURCC('y', 'u', 'v', 's'),  // Alias for YUY2 on Mac.
+  FOURCC_HDYC = FOURCC('H', 'D', 'Y', 'C'),  // Alias for UYVY.
+  FOURCC_2VUY = FOURCC('2', 'v', 'u', 'y'),  // Alias for UYVY on Mac.
+  FOURCC_JPEG = FOURCC('J', 'P', 'E', 'G'),  // Alias for MJPG.
+  FOURCC_DMB1 = FOURCC('d', 'm', 'b', '1'),  // Alias for MJPG on Mac.
+  FOURCC_BA81 = FOURCC('B', 'A', '8', '1'),  // Alias for BGGR.
+  FOURCC_RGB3 = FOURCC('R', 'G', 'B', '3'),  // Alias for RAW.
+  FOURCC_BGR3 = FOURCC('B', 'G', 'R', '3'),  // Alias for 24BG.
+  FOURCC_CM32 = FOURCC(0, 0, 0, 32),  // Alias for BGRA kCMPixelFormat_32ARGB
+  FOURCC_CM24 = FOURCC(0, 0, 0, 24),  // Alias for RAW kCMPixelFormat_24RGB
+
+  // 1 Auxiliary compressed YUV format set aside for capturer.
+  FOURCC_H264 = FOURCC('H', '2', '6', '4'),
+};
+
+// Match any fourcc.
+
+// We move this out of the enum because using it in many places caused
+// the compiler to get grumpy, presumably since the above enum is
+// backed by an int.
+static const uint32_t FOURCC_ANY = 0xFFFFFFFF;
+
+// Converts fourcc aliases into canonical ones. Non-aliases (including
+// FOURCC_ANY) are returned unchanged. Implemented in videocommon.cc.
+uint32_t CanonicalFourCC(uint32_t fourcc);
+
+// Get FourCC code as a string.
+// Get FourCC code as a 4-character string. Bytes are emitted
+// least-significant first, matching the FOURCC() packing above.
+inline std::string GetFourccName(uint32_t fourcc) {
+  std::string name;
+  name.push_back(static_cast<char>(fourcc & 0xFF));
+  name.push_back(static_cast<char>((fourcc >> 8) & 0xFF));
+  name.push_back(static_cast<char>((fourcc >> 16) & 0xFF));
+  name.push_back(static_cast<char>((fourcc >> 24) & 0xFF));
+  return name;
+}
+
+//////////////////////////////////////////////////////////////////////////////
+// Definition of VideoFormat.
+//////////////////////////////////////////////////////////////////////////////
+
+// VideoFormat with Plain Old Data for global variables.
+// VideoFormat with Plain Old Data for global variables; being POD allows
+// static initialization without running constructors.
+struct VideoFormatPod {
+  int width;  // Number of pixels.
+  int height;  // Number of pixels.
+  int64_t interval;  // Nanoseconds.
+  uint32_t fourcc;  // Color space. FOURCC_ANY means that any color space is OK.
+};
+
+struct VideoFormat : VideoFormatPod {
+  static const int64_t kMinimumInterval =
+      rtc::kNumNanosecsPerSec / 10000;  // 10k fps.
+
+  VideoFormat() {
+    Construct(0, 0, 0, 0);
+  }
+
+  VideoFormat(int w, int h, int64_t interval_ns, uint32_t cc) {
+    Construct(w, h, interval_ns, cc);
+  }
+
+  explicit VideoFormat(const VideoFormatPod& format) {
+    Construct(format.width, format.height, format.interval, format.fourcc);
+  }
+
+  // Shared initializer used by all constructors.
+  void Construct(int w, int h, int64_t interval_ns, uint32_t cc) {
+    width = w;
+    height = h;
+    interval = interval_ns;
+    fourcc = cc;
+  }
+
+  // Converts frames-per-second into a frame interval in nanoseconds.
+  // 0 fps maps to kMinimumInterval rather than dividing by zero.
+  static int64_t FpsToInterval(int fps) {
+    return fps ? rtc::kNumNanosecsPerSec / fps : kMinimumInterval;
+  }
+
+  // Converts a nanosecond interval back to fps; truncates toward zero, and a
+  // 0 interval maps to 0 fps.
+  static int IntervalToFps(int64_t interval) {
+    if (!interval) {
+      return 0;
+    }
+    return static_cast<int>(rtc::kNumNanosecsPerSec / interval);
+  }
+
+  // As IntervalToFps but without truncation, for display purposes.
+  static float IntervalToFpsFloat(int64_t interval) {
+    if (!interval) {
+      return 0.f;
+    }
+    return static_cast<float>(rtc::kNumNanosecsPerSec) /
+        static_cast<float>(interval);
+  }
+
+  bool operator==(const VideoFormat& format) const {
+    return width == format.width && height == format.height &&
+        interval == format.interval && fourcc == format.fourcc;
+  }
+
+  bool operator!=(const VideoFormat& format) const {
+    return !(*this == format);
+  }
+
+  // Orders by fourcc, then width, then height. Note that the interval
+  // comparison is inverted: for otherwise-equal formats, a larger interval
+  // (i.e. a lower frame rate) compares less, so ascending order lists frame
+  // rates from lowest to highest.
+  bool operator<(const VideoFormat& format) const {
+    return (fourcc < format.fourcc) ||
+        (fourcc == format.fourcc && width < format.width) ||
+        (fourcc == format.fourcc && width == format.width &&
+         height < format.height) ||
+        (fourcc == format.fourcc && width == format.width &&
+         height == format.height && interval > format.interval);
+  }
+
+  int framerate() const { return IntervalToFps(interval); }
+
+  // Check if both width and height are 0.
+  bool IsSize0x0() const { return 0 == width && 0 == height; }
+
+  // Check if this format is less than another one by comparing the resolution
+  // and frame rate (pixels per second).
+  bool IsPixelRateLess(const VideoFormat& format) const {
+    return width * height * framerate() <
+        format.width * format.height * format.framerate();
+  }
+
+  // Get a string presentation in the form of "fourcc width x height x fps"
+  std::string ToString() const;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCOMMON_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc
new file mode 100644
index 0000000000..0e29375b2a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2008 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videocommon.h"
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
+TEST(VideoCommonTest, TestCanonicalFourCC) {
+ // Canonical fourccs are not changed.
+ EXPECT_EQ(FOURCC_I420, CanonicalFourCC(FOURCC_I420));
+ // The special FOURCC_ANY value is not changed.
+ EXPECT_EQ(FOURCC_ANY, CanonicalFourCC(FOURCC_ANY));
+ // Aliases are translated to the canonical equivalent.
+ EXPECT_EQ(FOURCC_I420, CanonicalFourCC(FOURCC_IYUV));
+ EXPECT_EQ(FOURCC_I422, CanonicalFourCC(FOURCC_YU16));
+ EXPECT_EQ(FOURCC_I444, CanonicalFourCC(FOURCC_YU24));
+ EXPECT_EQ(FOURCC_YUY2, CanonicalFourCC(FOURCC_YUYV));
+ EXPECT_EQ(FOURCC_YUY2, CanonicalFourCC(FOURCC_YUVS));
+ EXPECT_EQ(FOURCC_UYVY, CanonicalFourCC(FOURCC_HDYC));
+ EXPECT_EQ(FOURCC_UYVY, CanonicalFourCC(FOURCC_2VUY));
+ EXPECT_EQ(FOURCC_MJPG, CanonicalFourCC(FOURCC_JPEG));
+ EXPECT_EQ(FOURCC_MJPG, CanonicalFourCC(FOURCC_DMB1));
+ EXPECT_EQ(FOURCC_BGGR, CanonicalFourCC(FOURCC_BA81));
+ EXPECT_EQ(FOURCC_RAW, CanonicalFourCC(FOURCC_RGB3));
+ EXPECT_EQ(FOURCC_24BG, CanonicalFourCC(FOURCC_BGR3));
+ EXPECT_EQ(FOURCC_BGRA, CanonicalFourCC(FOURCC_CM32));
+ EXPECT_EQ(FOURCC_RAW, CanonicalFourCC(FOURCC_CM24));
+}
+
+// Test conversion between interval and fps
+TEST(VideoCommonTest, TestVideoFormatFps) {
+ EXPECT_EQ(VideoFormat::kMinimumInterval, VideoFormat::FpsToInterval(0));
+ EXPECT_EQ(rtc::kNumNanosecsPerSec / 20, VideoFormat::FpsToInterval(20));
+ EXPECT_EQ(20, VideoFormat::IntervalToFps(rtc::kNumNanosecsPerSec / 20));
+ EXPECT_EQ(0, VideoFormat::IntervalToFps(0));
+}
+
+// Test IsSize0x0
+TEST(VideoCommonTest, TestVideoFormatIsSize0x0) {
+ VideoFormat format;
+ EXPECT_TRUE(format.IsSize0x0());
+ format.width = 320;
+ EXPECT_FALSE(format.IsSize0x0());
+}
+
+// Test ToString: print fourcc when it is printable.
+TEST(VideoCommonTest, TestVideoFormatToString) {
+ VideoFormat format;
+ EXPECT_EQ("0x0x0", format.ToString());
+
+ format.fourcc = FOURCC_I420;
+ format.width = 640;
+ format.height = 480;
+ format.interval = VideoFormat::FpsToInterval(20);
+ EXPECT_EQ("I420 640x480x20", format.ToString());
+
+ format.fourcc = FOURCC_ANY;
+ format.width = 640;
+ format.height = 480;
+ format.interval = VideoFormat::FpsToInterval(20);
+ EXPECT_EQ("640x480x20", format.ToString());
+}
+
+// Test comparison.
+TEST(VideoCommonTest, TestVideoFormatCompare) {
+ VideoFormat format(640, 480, VideoFormat::FpsToInterval(20), FOURCC_I420);
+ VideoFormat format2;
+ EXPECT_NE(format, format2);
+
+ // Same pixelrate, different fourcc.
+ format2 = format;
+ format2.fourcc = FOURCC_YUY2;
+ EXPECT_NE(format, format2);
+ EXPECT_FALSE(format.IsPixelRateLess(format2) ||
+ format2.IsPixelRateLess(format));
+
+ format2 = format;
+ format2.interval /= 2;
+ EXPECT_TRUE(format.IsPixelRateLess(format2));
+
+ format2 = format;
+ format2.width *= 2;
+ EXPECT_TRUE(format.IsPixelRateLess(format2));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h b/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h
new file mode 100644
index 0000000000..d49f73b206
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h
@@ -0,0 +1,951 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ // NOLINT
+#define MEDIA_BASE_VIDEOENGINE_UNITTEST_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "call/call.h"
+#include "logging/rtc_event_log/rtc_event_log.h"
+#include "media/base/fakenetworkinterface.h"
+#include "media/base/fakevideocapturer.h"
+#include "media/base/fakevideorenderer.h"
+#include "media/base/mediachannel.h"
+#include "media/base/streamparams.h"
+#include "media/engine/fakewebrtccall.h"
+#include "rtc_base/bytebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+class WebRtcVideoEncoderFactory;
+class WebRtcVideoDecoderFactory;
+} // namespace cricket
+
+#define EXPECT_FRAME_WAIT(c, w, h, t) \
+ EXPECT_EQ_WAIT((c), renderer_.num_rendered_frames(), (t)); \
+ EXPECT_EQ((w), renderer_.width()); \
+ EXPECT_EQ((h), renderer_.height()); \
+ EXPECT_EQ(0, renderer_.errors()); \
+
+#define EXPECT_FRAME_ON_RENDERER_WAIT(r, c, w, h, t) \
+ EXPECT_EQ_WAIT((c), (r).num_rendered_frames(), (t)); \
+ EXPECT_EQ((w), (r).width()); \
+ EXPECT_EQ((h), (r).height()); \
+ EXPECT_EQ(0, (r).errors()); \
+
+#define EXPECT_GT_FRAME_ON_RENDERER_WAIT(r, c, w, h, t) \
+ EXPECT_TRUE_WAIT((r).num_rendered_frames() >= (c) && \
+ (w) == (r).width() && \
+ (h) == (r).height(), (t)); \
+ EXPECT_EQ(0, (r).errors());
+
+static const uint32_t kTimeout = 5000U;
+static const uint32_t kDefaultReceiveSsrc = 0;
+static const uint32_t kSsrc = 1234u;
+static const uint32_t kRtxSsrc = 4321u;
+static const uint32_t kSsrcs4[] = {1, 2, 3, 4};
+static const int kVideoWidth = 640;
+static const int kVideoHeight = 360;
+static const int kFramerate = 30;
+
+inline bool IsEqualCodec(const cricket::VideoCodec& a,
+ const cricket::VideoCodec& b) {
+ return a.id == b.id && a.name == b.name;
+}
+
+template<class E, class C>
+class VideoMediaChannelTest : public testing::Test,
+ public sigslot::has_slots<> {
+ protected:
+ VideoMediaChannelTest<E, C>()
+ : call_(webrtc::Call::Create(webrtc::Call::Config(&event_log_))),
+ engine_(std::unique_ptr<cricket::WebRtcVideoEncoderFactory>(),
+ std::unique_ptr<cricket::WebRtcVideoDecoderFactory>()) {}
+
+ virtual cricket::VideoCodec DefaultCodec() = 0;
+
+ virtual cricket::StreamParams DefaultSendStreamParams() {
+ return cricket::StreamParams::CreateLegacy(kSsrc);
+ }
+
+ virtual void SetUp() {
+ cricket::MediaConfig media_config;
+ // Disabling cpu overuse detection actually disables quality scaling too; it
+ // implies DegradationPreference kMaintainResolution. Automatic scaling
+ // needs to be disabled, otherwise, tests which check the size of received
+ // frames become flaky.
+ media_config.video.enable_cpu_overuse_detection = false;
+ channel_.reset(engine_.CreateChannel(call_.get(), media_config,
+ cricket::VideoOptions()));
+ channel_->OnReadyToSend(true);
+ EXPECT_TRUE(channel_.get() != NULL);
+ network_interface_.SetDestination(channel_.get());
+ channel_->SetInterface(&network_interface_);
+ media_error_ = cricket::VideoMediaChannel::ERROR_NONE;
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs = engine_.codecs();
+ channel_->SetRecvParameters(parameters);
+ EXPECT_TRUE(channel_->AddSendStream(DefaultSendStreamParams()));
+ video_capturer_.reset(CreateFakeVideoCapturer());
+ cricket::VideoFormat format(640, 480,
+ cricket::VideoFormat::FpsToInterval(kFramerate),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_->Start(format));
+ EXPECT_TRUE(
+ channel_->SetVideoSend(kSsrc, true, nullptr, video_capturer_.get()));
+ }
+
+ virtual cricket::FakeVideoCapturer* CreateFakeVideoCapturer() {
+ return new cricket::FakeVideoCapturer();
+ }
+
+ // Utility method to setup an additional stream to send and receive video.
+ // Used to test send and recv between two streams.
+ void SetUpSecondStream() {
+ SetUpSecondStreamWithNoRecv();
+ // Setup recv for second stream.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc + 2)));
+ // Make the second renderer available for use by a new stream.
+ EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_));
+ }
+ // Setup an additional stream just to send video. Defer add recv stream.
+ // This is required if you want to test unsignalled recv of video rtp packets.
+ void SetUpSecondStreamWithNoRecv() {
+ // SetUp() already added kSsrc; make sure duplicate SSRCs can't be added.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ EXPECT_FALSE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrc + 2)));
+ // We don't add recv for the second stream.
+
+ // Setup the receive and renderer for second stream after send.
+ video_capturer_2_.reset(CreateFakeVideoCapturer());
+ cricket::VideoFormat format(640, 480,
+ cricket::VideoFormat::FpsToInterval(kFramerate),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_2_->Start(format));
+
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc + 2, true, nullptr,
+ video_capturer_2_.get()));
+ }
+ virtual void TearDown() {
+ channel_.reset();
+ }
+ bool SetDefaultCodec() {
+ return SetOneCodec(DefaultCodec());
+ }
+
+ bool SetOneCodec(int pt, const char* name) {
+ return SetOneCodec(cricket::VideoCodec(pt, name));
+ }
+ bool SetOneCodec(const cricket::VideoCodec& codec) {
+ cricket::VideoFormat capture_format(
+ kVideoWidth, kVideoHeight,
+ cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420);
+
+ if (video_capturer_) {
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_->Start(capture_format));
+ }
+ if (video_capturer_2_) {
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_2_->Start(capture_format));
+ }
+
+ bool sending = channel_->sending();
+ bool success = SetSend(false);
+ if (success) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ success = channel_->SetSendParameters(parameters);
+ }
+ if (success) {
+ success = SetSend(sending);
+ }
+ return success;
+ }
+ bool SetSend(bool send) {
+ return channel_->SetSend(send);
+ }
+ int DrainOutgoingPackets() {
+ int packets = 0;
+ do {
+ packets = NumRtpPackets();
+ // 100 ms should be long enough.
+ rtc::Thread::Current()->ProcessMessages(100);
+ } while (NumRtpPackets() > packets);
+ return NumRtpPackets();
+ }
+ bool SendFrame() {
+ if (video_capturer_2_) {
+ video_capturer_2_->CaptureFrame();
+ }
+ return video_capturer_.get() &&
+ video_capturer_->CaptureFrame();
+ }
+ bool WaitAndSendFrame(int wait_ms) {
+ bool ret = rtc::Thread::Current()->ProcessMessages(wait_ms);
+ ret &= SendFrame();
+ return ret;
+ }
+ // Sends frames and waits for the decoder to be fully initialized.
+ // Returns the number of frames that were sent.
+ int WaitForDecoder() {
+#if defined(HAVE_OPENMAX)
+ // Send enough frames for the OpenMAX decoder to continue processing, and
+ // return the number of frames sent.
+ // Send frames for a full kTimeout's worth of 15fps video.
+ int frame_count = 0;
+ while (frame_count < static_cast<int>(kTimeout) / 66) {
+ EXPECT_TRUE(WaitAndSendFrame(66));
+ ++frame_count;
+ }
+ return frame_count;
+#else
+ return 0;
+#endif
+ }
+ bool SendCustomVideoFrame(int w, int h) {
+ if (!video_capturer_.get()) return false;
+ return video_capturer_->CaptureCustomFrame(w, h, cricket::FOURCC_I420);
+ }
+ int NumRtpBytes() {
+ return network_interface_.NumRtpBytes();
+ }
+ int NumRtpBytes(uint32_t ssrc) {
+ return network_interface_.NumRtpBytes(ssrc);
+ }
+ int NumRtpPackets() {
+ return network_interface_.NumRtpPackets();
+ }
+ int NumRtpPackets(uint32_t ssrc) {
+ return network_interface_.NumRtpPackets(ssrc);
+ }
+ int NumSentSsrcs() {
+ return network_interface_.NumSentSsrcs();
+ }
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ return network_interface_.GetRtpPacket(index);
+ }
+ int NumRtcpPackets() {
+ return network_interface_.NumRtcpPackets();
+ }
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
+ return network_interface_.GetRtcpPacket(index);
+ }
+ static int GetPayloadType(const rtc::CopyOnWriteBuffer* p) {
+ int pt = -1;
+ ParseRtpPacket(p, NULL, &pt, NULL, NULL, NULL, NULL);
+ return pt;
+ }
+ static bool ParseRtpPacket(const rtc::CopyOnWriteBuffer* p,
+ bool* x,
+ int* pt,
+ int* seqnum,
+ uint32_t* tstamp,
+ uint32_t* ssrc,
+ std::string* payload) {
+ rtc::ByteBufferReader buf(p->data<char>(), p->size());
+ uint8_t u08 = 0;
+ uint16_t u16 = 0;
+ uint32_t u32 = 0;
+
+ // Read X and CC fields.
+ if (!buf.ReadUInt8(&u08)) return false;
+ bool extension = ((u08 & 0x10) != 0);
+ uint8_t cc = (u08 & 0x0F);
+ if (x) *x = extension;
+
+ // Read PT field.
+ if (!buf.ReadUInt8(&u08)) return false;
+ if (pt) *pt = (u08 & 0x7F);
+
+ // Read Sequence Number field.
+ if (!buf.ReadUInt16(&u16)) return false;
+ if (seqnum) *seqnum = u16;
+
+ // Read Timestamp field.
+ if (!buf.ReadUInt32(&u32)) return false;
+ if (tstamp) *tstamp = u32;
+
+ // Read SSRC field.
+ if (!buf.ReadUInt32(&u32)) return false;
+ if (ssrc) *ssrc = u32;
+
+ // Skip CSRCs.
+ for (uint8_t i = 0; i < cc; ++i) {
+ if (!buf.ReadUInt32(&u32)) return false;
+ }
+
+ // Skip extension header.
+ if (extension) {
+ // Read Profile-specific extension header ID
+ if (!buf.ReadUInt16(&u16)) return false;
+
+ // Read Extension header length
+ if (!buf.ReadUInt16(&u16)) return false;
+ uint16_t ext_header_len = u16;
+
+ // Read Extension header
+ for (uint16_t i = 0; i < ext_header_len; ++i) {
+ if (!buf.ReadUInt32(&u32)) return false;
+ }
+ }
+
+ if (payload) {
+ return buf.ReadString(payload, buf.Length());
+ }
+ return true;
+ }
+
+ // Parse all RTCP packets, from start_index to stop_index, and count how many
+ // FIRs (PT=206 and FMT=4 according to RFC 5104). If successful, set the count
+ // and return true.
+ bool CountRtcpFir(int start_index, int stop_index, int* fir_count) {
+ int count = 0;
+ for (int i = start_index; i < stop_index; ++i) {
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtcpPacket(i));
+ rtc::ByteBufferReader buf(p->data<char>(), p->size());
+ size_t total_len = 0;
+ // The packet may be a compound RTCP packet.
+ while (total_len < p->size()) {
+ // Read FMT, type and length.
+ uint8_t fmt = 0;
+ uint8_t type = 0;
+ uint16_t length = 0;
+ if (!buf.ReadUInt8(&fmt)) return false;
+ fmt &= 0x1F;
+ if (!buf.ReadUInt8(&type)) return false;
+ if (!buf.ReadUInt16(&length)) return false;
+ buf.Consume(length * 4); // Skip RTCP data.
+ total_len += (length + 1) * 4;
+ if ((192 == type) || ((206 == type) && (4 == fmt))) {
+ ++count;
+ }
+ }
+ }
+
+ if (fir_count) {
+ *fir_count = count;
+ }
+ return true;
+ }
+
+ void OnVideoChannelError(uint32_t ssrc,
+ cricket::VideoMediaChannel::Error error) {
+ media_error_ = error;
+ }
+
+ // Test that SetSend works.
+ void SetSend() {
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_TRUE(
+ channel_->SetVideoSend(kSsrc, true, nullptr, video_capturer_.get()));
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->sending());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ EXPECT_TRUE(SetSend(false));
+ EXPECT_FALSE(channel_->sending());
+ }
+ // Test that SetSend fails without codecs being set.
+ void SetSendWithoutCodecs() {
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_FALSE(SetSend(true));
+ EXPECT_FALSE(channel_->sending());
+ }
+ // Test that we properly set the send and recv buffer sizes by the time
+ // SetSend is called.
+ void SetSendSetsTransportBufferSizes() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_EQ(64 * 1024, network_interface_.sendbuf_size());
+ EXPECT_EQ(64 * 1024, network_interface_.recvbuf_size());
+ }
+ // Tests that we can send frames and the right payload type is used.
+ void Send(const cricket::VideoCodec& codec) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+ // Tests that we can send and receive frames.
+ void SendAndReceive(const cricket::VideoCodec& codec) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+ void SendReceiveManyAndGetStats(const cricket::VideoCodec& codec,
+ int duration_sec, int fps) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ for (int i = 0; i < duration_sec; ++i) {
+ for (int frame = 1; frame <= fps; ++frame) {
+ EXPECT_TRUE(WaitAndSendFrame(1000 / fps));
+ EXPECT_FRAME_WAIT(frame + i * fps, kVideoWidth, kVideoHeight, kTimeout);
+ }
+ }
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+
+ // Test that stats work properly for a 1-1 call.
+ void GetStats() {
+ const int kDurationSec = 3;
+ const int kFps = 10;
+ SendReceiveManyAndGetStats(DefaultCodec(), kDurationSec, kFps);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+
+ ASSERT_EQ(1U, info.senders.size());
+ // TODO(whyuan): bytes_sent and bytes_rcvd are different. Are both payload?
+ // For webrtc, bytes_sent does not include the RTP header length.
+ EXPECT_GT(info.senders[0].bytes_sent, 0);
+ EXPECT_EQ(NumRtpPackets(), info.senders[0].packets_sent);
+ EXPECT_EQ(0.0, info.senders[0].fraction_lost);
+ ASSERT_TRUE(info.senders[0].codec_payload_type);
+ EXPECT_EQ(DefaultCodec().id, *info.senders[0].codec_payload_type);
+ EXPECT_EQ(0, info.senders[0].firs_rcvd);
+ EXPECT_EQ(0, info.senders[0].plis_rcvd);
+ EXPECT_EQ(0, info.senders[0].nacks_rcvd);
+ EXPECT_EQ(kVideoWidth, info.senders[0].send_frame_width);
+ EXPECT_EQ(kVideoHeight, info.senders[0].send_frame_height);
+ EXPECT_GT(info.senders[0].framerate_input, 0);
+ EXPECT_GT(info.senders[0].framerate_sent, 0);
+
+ EXPECT_EQ(1U, info.send_codecs.count(DefaultCodec().id));
+ EXPECT_EQ(DefaultCodec().ToCodecParameters(),
+ info.send_codecs[DefaultCodec().id]);
+
+ ASSERT_EQ(1U, info.receivers.size());
+ EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+ EXPECT_EQ(1U, info.receivers[0].ssrcs().size());
+ EXPECT_EQ(info.senders[0].ssrcs()[0], info.receivers[0].ssrcs()[0]);
+ ASSERT_TRUE(info.receivers[0].codec_payload_type);
+ EXPECT_EQ(DefaultCodec().id, *info.receivers[0].codec_payload_type);
+ EXPECT_EQ(NumRtpBytes(), info.receivers[0].bytes_rcvd);
+ EXPECT_EQ(NumRtpPackets(), info.receivers[0].packets_rcvd);
+ EXPECT_EQ(0.0, info.receivers[0].fraction_lost);
+ EXPECT_EQ(0, info.receivers[0].packets_lost);
+ // TODO(asapersson): Not set for webrtc. Handle missing stats.
+ // EXPECT_EQ(0, info.receivers[0].packets_concealed);
+ EXPECT_EQ(0, info.receivers[0].firs_sent);
+ EXPECT_EQ(0, info.receivers[0].plis_sent);
+ EXPECT_EQ(0, info.receivers[0].nacks_sent);
+ EXPECT_EQ(kVideoWidth, info.receivers[0].frame_width);
+ EXPECT_EQ(kVideoHeight, info.receivers[0].frame_height);
+ EXPECT_GT(info.receivers[0].framerate_rcvd, 0);
+ EXPECT_GT(info.receivers[0].framerate_decoded, 0);
+ EXPECT_GT(info.receivers[0].framerate_output, 0);
+
+ EXPECT_EQ(1U, info.receive_codecs.count(DefaultCodec().id));
+ EXPECT_EQ(DefaultCodec().ToCodecParameters(),
+ info.receive_codecs[DefaultCodec().id]);
+ }
+
+ cricket::VideoSenderInfo GetSenderStats(size_t i) {
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ return info.senders[i];
+ }
+
+ cricket::VideoReceiverInfo GetReceiverStats(size_t i) {
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ return info.receivers[i];
+ }
+
+ // Test that stats work properly for a conf call with multiple recv streams.
+ void GetStatsMultipleRecvStreams() {
+ cricket::FakeVideoRenderer renderer1, renderer2;
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_EQ(0, renderer1.num_rendered_frames());
+ EXPECT_EQ(0, renderer2.num_rendered_frames());
+ std::vector<uint32_t> ssrcs;
+ ssrcs.push_back(1);
+ ssrcs.push_back(2);
+ network_interface_.SetConferenceMode(true, ssrcs);
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+
+ EXPECT_TRUE(channel_->SetSend(false));
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ // TODO(whyuan): bytes_sent and bytes_rcvd are different. Are both payload?
+ // For webrtc, bytes_sent does not include the RTP header length.
+ EXPECT_GT(GetSenderStats(0).bytes_sent, 0);
+ EXPECT_EQ_WAIT(NumRtpPackets(), GetSenderStats(0).packets_sent, kTimeout);
+ EXPECT_EQ(kVideoWidth, GetSenderStats(0).send_frame_width);
+ EXPECT_EQ(kVideoHeight, GetSenderStats(0).send_frame_height);
+
+ ASSERT_EQ(2U, info.receivers.size());
+ for (size_t i = 0; i < info.receivers.size(); ++i) {
+ EXPECT_EQ(1U, GetReceiverStats(i).ssrcs().size());
+ EXPECT_EQ(i + 1, GetReceiverStats(i).ssrcs()[0]);
+ EXPECT_EQ_WAIT(NumRtpBytes(), GetReceiverStats(i).bytes_rcvd, kTimeout);
+ EXPECT_EQ_WAIT(NumRtpPackets(), GetReceiverStats(i).packets_rcvd,
+ kTimeout);
+ EXPECT_EQ_WAIT(kVideoWidth, GetReceiverStats(i).frame_width, kTimeout);
+ EXPECT_EQ_WAIT(kVideoHeight, GetReceiverStats(i).frame_height, kTimeout);
+ }
+ }
+ // Test that stats work properly for a conf call with multiple send streams.
+ void GetStatsMultipleSendStreams() {
+ // Normal setup; note that we set the SSRC explicitly to ensure that
+ // it will come first in the senders map.
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+
+ // Add an additional capturer, and hook up a renderer to receive it.
+ cricket::FakeVideoRenderer renderer2;
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer(
+ CreateFakeVideoCapturer());
+ const int kTestWidth = 160;
+ const int kTestHeight = 120;
+ cricket::VideoFormat format(kTestWidth, kTestHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(5678)));
+ EXPECT_TRUE(channel_->SetVideoSend(5678, true, nullptr, capturer.get()));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(5678)));
+ EXPECT_TRUE(channel_->SetSink(5678, &renderer2));
+ EXPECT_TRUE(capturer->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer2, 1, kTestWidth, kTestHeight, kTimeout);
+
+ // Get stats, and make sure they are correct for two senders. We wait until
+ // the number of expected packets have been sent to avoid races where we
+ // check stats before it has been updated.
+ cricket::VideoMediaInfo info;
+ for (uint32_t i = 0; i < kTimeout; ++i) {
+ rtc::Thread::Current()->ProcessMessages(1);
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(2U, info.senders.size());
+ if (info.senders[0].packets_sent + info.senders[1].packets_sent ==
+ NumRtpPackets()) {
+ // Stats have been updated for both sent frames, expectations can be
+ // checked now.
+ break;
+ }
+ }
+ EXPECT_EQ(NumRtpPackets(),
+ info.senders[0].packets_sent + info.senders[1].packets_sent)
+ << "Timed out while waiting for packet counts for all sent packets.";
+ EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+ EXPECT_EQ(1234U, info.senders[0].ssrcs()[0]);
+ EXPECT_EQ(kVideoWidth, info.senders[0].send_frame_width);
+ EXPECT_EQ(kVideoHeight, info.senders[0].send_frame_height);
+ EXPECT_EQ(1U, info.senders[1].ssrcs().size());
+ EXPECT_EQ(5678U, info.senders[1].ssrcs()[0]);
+ EXPECT_EQ(kTestWidth, info.senders[1].send_frame_width);
+ EXPECT_EQ(kTestHeight, info.senders[1].send_frame_height);
+ // The capturer must be unregistered here as it goes out of its scope next.
+ channel_->SetVideoSend(5678, true, nullptr, nullptr);
+ }
+
+ // Test that we can set the bandwidth.
+ void SetSendBandwidth() {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.max_bandwidth_bps = -1; // <= 0 means unlimited.
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ parameters.max_bandwidth_bps = 128 * 1024;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ }
+ // Test that we can set the SSRC for the default send source.
+ void SetSendSsrc() {
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ uint32_t ssrc = 0;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(kSsrc, ssrc);
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
+ EXPECT_EQ(1, NumSentSsrcs());
+ EXPECT_EQ(0, NumRtpPackets(kSsrc - 1));
+ EXPECT_EQ(0, NumRtpBytes(kSsrc - 1));
+ }
+ // Test that we can set the SSRC even after codecs are set.
+ void SetSendSsrcAfterSetCodecs() {
+ // Remove stream added in Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(999)));
+ EXPECT_TRUE(
+ channel_->SetVideoSend(999u, true, nullptr, video_capturer_.get()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(WaitAndSendFrame(0));
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ uint32_t ssrc = 0;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(999u, ssrc);
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
+ EXPECT_EQ(1, NumSentSsrcs());
+ EXPECT_EQ(0, NumRtpPackets(kSsrc));
+ EXPECT_EQ(0, NumRtpBytes(kSsrc));
+ }
+ // Test that we can set the default video renderer before and after
+ // media is received.
+ void SetSink() {
+ uint8_t data1[] = {
+ 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
+
+ rtc::CopyOnWriteBuffer packet1(data1, sizeof(data1));
+ rtc::SetBE32(packet1.data() + 8, kSsrc);
+ channel_->SetSink(kDefaultReceiveSsrc, NULL);
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ channel_->OnPacketReceived(&packet1, rtc::PacketTime());
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ }
+
+ // Tests empty StreamParams is rejected.
+ void RejectEmptyStreamParams() {
+ // Remove the send stream that was added during Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+
+ cricket::StreamParams empty;
+ EXPECT_FALSE(channel_->AddSendStream(empty));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(789u)));
+ }
+
+ // Tests setting up and configuring a send stream.
+ void AddRemoveSendStreams() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ EXPECT_GT(NumRtpPackets(), 0);
+ uint32_t ssrc = 0;
+ size_t last_packet = NumRtpPackets() - 1;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer>
+ p(GetRtpPacket(static_cast<int>(last_packet)));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(kSsrc, ssrc);
+
+ // Remove the send stream that was added during Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ int rtp_packets = NumRtpPackets();
+
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(789u)));
+ EXPECT_TRUE(
+ channel_->SetVideoSend(789u, true, nullptr, video_capturer_.get()));
+ EXPECT_EQ(rtp_packets, NumRtpPackets());
+ // Wait 30ms to guarantee the engine does not drop the frame.
+ EXPECT_TRUE(WaitAndSendFrame(30));
+ EXPECT_TRUE_WAIT(NumRtpPackets() > rtp_packets, kTimeout);
+
+ last_packet = NumRtpPackets() - 1;
+ p.reset(GetRtpPacket(static_cast<int>(last_packet)));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(789u, ssrc);
+ }
+
+ // Tests the behavior of incoming streams in a conference scenario.
+ void SimulateConference() {
+ cricket::FakeVideoRenderer renderer1, renderer2;
+ EXPECT_TRUE(SetDefaultCodec());
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_EQ(0, renderer1.num_rendered_frames());
+ EXPECT_EQ(0, renderer2.num_rendered_frames());
+ std::vector<uint32_t> ssrcs;
+ ssrcs.push_back(1);
+ ssrcs.push_back(2);
+ network_interface_.SetConferenceMode(true, ssrcs);
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(DefaultCodec().id, GetPayloadType(p.get()));
+ EXPECT_EQ(kVideoWidth, renderer1.width());
+ EXPECT_EQ(kVideoHeight, renderer1.height());
+ EXPECT_EQ(kVideoWidth, renderer2.width());
+ EXPECT_EQ(kVideoHeight, renderer2.height());
+ EXPECT_TRUE(channel_->RemoveRecvStream(2));
+ EXPECT_TRUE(channel_->RemoveRecvStream(1));
+ }
+
+ // Tests that we can add and remove capturers and frames are sent out properly
+ void AddRemoveCapturer() {
+ cricket::VideoCodec codec = DefaultCodec();
+ const int time_between_send_ms =
+ cricket::VideoFormat::FpsToInterval(kFramerate);
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer(
+ CreateFakeVideoCapturer());
+
+ // TODO(nisse): This testcase fails if we don't configure
+ // screencast. It's unclear why, I see nothing obvious in this
+ // test which is related to screencast logic.
+ cricket::VideoOptions video_options;
+ video_options.is_screencast = true;
+ channel_->SetVideoSend(kSsrc, true, &video_options, nullptr);
+
+ cricket::VideoFormat format(480, 360,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
+ // All capturers start generating frames with the same timestamp. ViE does
+ // not allow the same timestamp to be used. Capture one frame before
+ // associating the capturer with the channel.
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+
+ int captured_frames = 1;
+ for (int iterations = 0; iterations < 2; ++iterations) {
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, capturer.get()));
+ rtc::Thread::Current()->ProcessMessages(time_between_send_ms);
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+ ++captured_frames;
+ // Wait until frame of right size is captured.
+ EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
+ format.width == renderer_.width() &&
+ format.height == renderer_.height() &&
+ !renderer_.black_frame(), kTimeout);
+ EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
+ EXPECT_EQ(format.width, renderer_.width());
+ EXPECT_EQ(format.height, renderer_.height());
+ captured_frames = renderer_.num_rendered_frames() + 1;
+ EXPECT_FALSE(renderer_.black_frame());
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+ // Make sure a black frame is generated within the specified timeout.
+ // The black frame should be the resolution of the previous frame to
+ // prevent expensive encoder reconfigurations.
+ EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
+ format.width == renderer_.width() &&
+ format.height == renderer_.height() &&
+ renderer_.black_frame(), kTimeout);
+ EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
+ EXPECT_EQ(format.width, renderer_.width());
+ EXPECT_EQ(format.height, renderer_.height());
+ EXPECT_TRUE(renderer_.black_frame());
+
+      // The black frame has the same timestamp as the next frame since its
+      // timestamp is set to the last frame's timestamp + interval. WebRTC will
+ // not render a frame with the same timestamp so capture another frame
+ // with the frame capturer to increment the next frame's timestamp.
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+ }
+ }
+
+ // Tests that if SetVideoSend is called with a NULL capturer after the
+ // capturer was already removed, the application doesn't crash (and no black
+ // frame is sent).
+ void RemoveCapturerWithoutAdd() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ // Wait for one frame so they don't get dropped because we send frames too
+ // tightly.
+ rtc::Thread::Current()->ProcessMessages(30);
+ // Remove the capturer.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+
+ // No capturer was added, so this SetVideoSend shouldn't do anything.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+ rtc::Thread::Current()->ProcessMessages(300);
+ // Verify no more frames were sent.
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+ }
+
+ // Tests that we can add and remove capturer as unique sources.
+ void AddRemoveCapturerMultipleSources() {
+    // WebRTC implementation will drop frames if pushed too quickly. Wait the
+    // interval time to avoid that (the capture format below uses an
+    // interval derived from kFramerate so frames are delivered at the
+    // expected rate).
+ // Set up the stream associated with the engine.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ cricket::VideoFormat capture_format; // default format
+ capture_format.interval = cricket::VideoFormat::FpsToInterval(kFramerate);
+ // Set up additional stream 1.
+ cricket::FakeVideoRenderer renderer1;
+ EXPECT_FALSE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer1(
+ CreateFakeVideoCapturer());
+ EXPECT_EQ(cricket::CS_RUNNING, capturer1->Start(capture_format));
+ // Set up additional stream 2.
+ cricket::FakeVideoRenderer renderer2;
+ EXPECT_FALSE(channel_->SetSink(2, &renderer2));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer2(
+ CreateFakeVideoCapturer());
+ EXPECT_EQ(cricket::CS_RUNNING, capturer2->Start(capture_format));
+ // State for all the streams.
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ // A limitation in the lmi implementation requires that SetVideoSend() is
+ // called after SetOneCodec().
+ // TODO(hellner): this seems like an unnecessary constraint, fix it.
+ EXPECT_TRUE(channel_->SetVideoSend(1, true, nullptr, capturer1.get()));
+ EXPECT_TRUE(channel_->SetVideoSend(2, true, nullptr, capturer2.get()));
+ EXPECT_TRUE(SetSend(true));
+ // Test capturer associated with engine.
+ const int kTestWidth = 160;
+ const int kTestHeight = 120;
+ EXPECT_TRUE(capturer1->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer1, 1, kTestWidth, kTestHeight, kTimeout);
+ // Capture a frame with additional capturer2, frames should be received
+ EXPECT_TRUE(capturer2->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer2, 1, kTestWidth, kTestHeight, kTimeout);
+ // Successfully remove the capturer.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+    // The capturers must be unregistered here as they run out of scope
+    // next.
+ EXPECT_TRUE(channel_->SetVideoSend(1, true, nullptr, nullptr));
+ EXPECT_TRUE(channel_->SetVideoSend(2, true, nullptr, nullptr));
+ }
+
+ // Test that multiple send streams can be created and deleted properly.
+ void MultipleSendStreams() {
+ // Remove stream added in Setup. I.e. remove stream corresponding to default
+ // channel.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ const unsigned int kSsrcsSize = sizeof(kSsrcs4)/sizeof(kSsrcs4[0]);
+ for (unsigned int i = 0; i < kSsrcsSize; ++i) {
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcs4[i])));
+ }
+ // Delete one of the non default channel streams, let the destructor delete
+ // the remaining ones.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
+ // Stream should already be deleted.
+ EXPECT_FALSE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
+ }
+
+ // Two streams one channel tests.
+
+ // Tests that we can send and receive frames.
+ void TwoStreamsSendAndReceive(const cricket::VideoCodec& codec) {
+ SetUpSecondStream();
+ // Test sending and receiving on first stream.
+ SendAndReceive(codec);
+ // Test sending and receiving on second stream.
+ EXPECT_EQ_WAIT(1, renderer2_.num_rendered_frames(), kTimeout);
+ EXPECT_GT(NumRtpPackets(), 0);
+ EXPECT_EQ(1, renderer2_.num_rendered_frames());
+ }
+
+ webrtc::RtcEventLogNullImpl event_log_;
+ const std::unique_ptr<webrtc::Call> call_;
+ E engine_;
+ std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_;
+ std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_2_;
+ std::unique_ptr<C> channel_;
+ cricket::FakeNetworkInterface network_interface_;
+ cricket::FakeVideoRenderer renderer_;
+ cricket::VideoMediaChannel::Error media_error_;
+
+ // Used by test cases where 2 streams are run on the same channel.
+ cricket::FakeVideoRenderer renderer2_;
+};
+
+#endif // MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ NOLINT
diff --git a/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h b/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h
new file mode 100644
index 0000000000..900e786f24
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSINKINTERFACE_H_
+#define MEDIA_BASE_VIDEOSINKINTERFACE_H_
+
+#include <rtc_base/checks.h>
+// TODO(nisse): Consider moving this interface (and possibly
+// VideoSourceInterface too) from media/base to common_video, to
+// reduce dependency cycles.
+namespace rtc {
+
+template <typename VideoFrameT>
+class VideoSinkInterface {
+ public:
+ virtual ~VideoSinkInterface() {}
+
+ virtual void OnFrame(const VideoFrameT& frame) = 0;
+
+ // Should be called by the source when it discards the frame due to rate
+ // limiting.
+ virtual void OnDiscardedFrame() {}
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOSINKINTERFACE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc b/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc
new file mode 100644
index 0000000000..299795fca6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videosourcebase.h"
+
+#include "rtc_base/checks.h"
+
+namespace rtc {
+
+VideoSourceBase::VideoSourceBase() {
+ thread_checker_.DetachFromThread();
+}
+
+void VideoSourceBase::AddOrUpdateSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+
+ SinkPair* sink_pair = FindSinkPair(sink);
+ if (!sink_pair) {
+ sinks_.push_back(SinkPair(sink, wants));
+ } else {
+ sink_pair->wants = wants;
+ }
+}
+
+void VideoSourceBase::RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ RTC_DCHECK(FindSinkPair(sink));
+ sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(),
+ [sink](const SinkPair& sink_pair) {
+ return sink_pair.sink == sink;
+ }),
+ sinks_.end());
+}
+
+VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair(
+ const VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ auto sink_pair_it = std::find_if(
+ sinks_.begin(), sinks_.end(),
+ [sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; });
+ if (sink_pair_it != sinks_.end()) {
+ return &*sink_pair_it;
+ }
+ return nullptr;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourcebase.h b/third_party/libwebrtc/webrtc/media/base/videosourcebase.h
new file mode 100644
index 0000000000..f19d8fb587
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourcebase.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSOURCEBASE_H_
+#define MEDIA_BASE_VIDEOSOURCEBASE_H_
+
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "media/base/videosourceinterface.h"
+#include "rtc_base/thread_checker.h"
+
+namespace rtc {
+
+// VideoSourceBase is not thread safe.
+class VideoSourceBase : public VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+ VideoSourceBase();
+ void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) override;
+ void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ protected:
+ struct SinkPair {
+ SinkPair(VideoSinkInterface<webrtc::VideoFrame>* sink, VideoSinkWants wants)
+ : sink(sink), wants(wants) {}
+ VideoSinkInterface<webrtc::VideoFrame>* sink;
+ VideoSinkWants wants;
+ };
+ SinkPair* FindSinkPair(const VideoSinkInterface<webrtc::VideoFrame>* sink);
+
+ const std::vector<SinkPair>& sink_pairs() const { return sinks_; }
+ ThreadChecker thread_checker_;
+
+ private:
+ std::vector<SinkPair> sinks_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOSOURCEBASE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc
new file mode 100644
index 0000000000..42ea04ea62
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videosourceinterface.h"
+namespace rtc {
+
+VideoSinkWants::VideoSinkWants() = default;
+VideoSinkWants::~VideoSinkWants() = default;
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h
new file mode 100644
index 0000000000..701ddec492
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
+#define MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
+
+#include <limits>
+
+#include "api/optional.h"
+#include "media/base/videosinkinterface.h"
+
+namespace rtc {
+
+// VideoSinkWants is used for notifying the source of properties a video frame
+// should have when it is delivered to a certain sink.
+struct VideoSinkWants {
+ VideoSinkWants();
+ ~VideoSinkWants();
+ // Tells the source whether the sink wants frames with rotation applied.
+ // By default, any rotation must be applied by the sink.
+ bool rotation_applied = false;
+
+ // Tells the source that the sink only wants black frames.
+ bool black_frames = false;
+
+ // Tells the source the maximum number of pixels the sink wants.
+ int max_pixel_count = std::numeric_limits<int>::max();
+  // Tells the source the desired number of pixels the sink wants. This will
+ // typically be used when stepping the resolution up again when conditions
+ // have improved after an earlier downgrade. The source should select the
+ // closest resolution to this pixel count, but if max_pixel_count is set, it
+ // still sets the absolute upper bound.
+ rtc::Optional<int> target_pixel_count;
+ // Tells the source the maximum framerate the sink wants.
+ int max_framerate_fps = std::numeric_limits<int>::max();
+};
+
+template <typename VideoFrameT>
+class VideoSourceInterface {
+ public:
+ virtual void AddOrUpdateSink(VideoSinkInterface<VideoFrameT>* sink,
+ const VideoSinkWants& wants) = 0;
+ // RemoveSink must guarantee that at the time the method returns,
+ // there is no current and no future calls to VideoSinkInterface::OnFrame.
+ virtual void RemoveSink(VideoSinkInterface<VideoFrameT>* sink) = 0;
+
+ protected:
+ virtual ~VideoSourceInterface() {}
+};
+
+} // namespace rtc
+#endif // MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/adm_helpers.cc b/third_party/libwebrtc/webrtc/media/engine/adm_helpers.cc
new file mode 100644
index 0000000000..119cc64451
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/adm_helpers.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/adm_helpers.h"
+
+#include "modules/audio_device/include/audio_device.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace adm_helpers {
+
+// On Windows Vista and newer, Microsoft introduced the concept of "Default
+// Communications Device". This means that there are two types of default
+// devices (old Wave Audio style default and Default Communications Device).
+//
+// On Windows systems which only support the Wave Audio style default, either
+// -1 or 0 is used to select the default device.
+//
+// Using a #define for AUDIO_DEVICE since we will call *different* versions of
+// the ADM functions, depending on the ID type.
+#if defined(WEBRTC_WIN)
+#define AUDIO_DEVICE_ID \
+ (AudioDeviceModule::WindowsDeviceType::kDefaultCommunicationDevice)
+#else
+#define AUDIO_DEVICE_ID (0u)
+#endif // defined(WEBRTC_WIN)
+
+void Init(AudioDeviceModule* adm) {
+ RTC_DCHECK(adm);
+
+ RTC_CHECK_EQ(0, adm->Init()) << "Failed to initialize the ADM.";
+
+ // Playout device.
+ {
+ if (adm->SetPlayoutDevice(AUDIO_DEVICE_ID) != 0) {
+ RTC_LOG(LS_ERROR) << "Unable to set playout device.";
+ return;
+ }
+ if (adm->InitSpeaker() != 0) {
+ RTC_LOG(LS_ERROR) << "Unable to access speaker.";
+ }
+
+ // Set number of channels
+ bool available = false;
+ if (adm->StereoPlayoutIsAvailable(&available) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to query stereo playout.";
+ }
+ if (adm->SetStereoPlayout(available) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set stereo playout mode.";
+ }
+ }
+
+ // Recording device.
+ {
+ if (adm->SetRecordingDevice(AUDIO_DEVICE_ID) != 0) {
+ RTC_LOG(LS_ERROR) << "Unable to set recording device.";
+ return;
+ }
+ if (adm->InitMicrophone() != 0) {
+ RTC_LOG(LS_ERROR) << "Unable to access microphone.";
+ }
+
+ // Set number of channels
+ bool available = false;
+ if (adm->StereoRecordingIsAvailable(&available) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to query stereo recording.";
+ }
+ if (adm->SetStereoRecording(available) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set stereo recording mode.";
+ }
+ }
+}
+} // namespace adm_helpers
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/adm_helpers.h b/third_party/libwebrtc/webrtc/media/engine/adm_helpers.h
new file mode 100644
index 0000000000..c6ea3a209f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/adm_helpers.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_ADM_HELPERS_H_
+#define MEDIA_ENGINE_ADM_HELPERS_H_
+
+#include "common_types.h" // NOLINT(build/include)
+
+namespace webrtc {
+
+class AudioDeviceModule;
+
+namespace adm_helpers {
+
+void Init(AudioDeviceModule* adm);
+
+} // namespace adm_helpers
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_ADM_HELPERS_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/apm_helpers.cc b/third_party/libwebrtc/webrtc/media/engine/apm_helpers.cc
new file mode 100644
index 0000000000..5c12faa050
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/apm_helpers.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/apm_helpers.h"
+
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/logging.h"
+#include "voice_engine/transmit_mixer.h"
+
+namespace webrtc {
+namespace apm_helpers {
+
+void Init(AudioProcessing* apm) {
+ RTC_DCHECK(apm);
+
+ constexpr int kMinVolumeLevel = 0;
+ constexpr int kMaxVolumeLevel = 255;
+
+ // This is the initialization which used to happen in VoEBase::Init(), but
+ // which is not covered by the WVoE::ApplyOptions().
+ if (apm->echo_cancellation()->enable_drift_compensation(false) != 0) {
+ RTC_DLOG(LS_ERROR) << "Failed to disable drift compensation.";
+ }
+ GainControl* gc = apm->gain_control();
+ if (gc->set_analog_level_limits(kMinVolumeLevel, kMaxVolumeLevel) != 0) {
+ RTC_DLOG(LS_ERROR) << "Failed to set analog level limits with minimum: "
+ << kMinVolumeLevel << " and maximum: " << kMaxVolumeLevel;
+ }
+}
+
+AgcConfig GetAgcConfig(AudioProcessing* apm) {
+ RTC_DCHECK(apm);
+ AgcConfig result;
+ result.targetLeveldBOv = apm->gain_control()->target_level_dbfs();
+ result.digitalCompressionGaindB = apm->gain_control()->compression_gain_db();
+ result.limiterEnable = apm->gain_control()->is_limiter_enabled();
+ return result;
+}
+
+void SetAgcConfig(AudioProcessing* apm,
+ const AgcConfig& config) {
+ RTC_DCHECK(apm);
+ GainControl* gc = apm->gain_control();
+ if (gc->set_target_level_dbfs(config.targetLeveldBOv) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set target level: "
+ << config.targetLeveldBOv;
+ }
+ if (gc->set_compression_gain_db(config.digitalCompressionGaindB) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set compression gain: "
+ << config.digitalCompressionGaindB;
+ }
+ if (gc->enable_limiter(config.limiterEnable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set limiter on/off: "
+ << config.limiterEnable;
+ }
+}
+
+void SetAgcStatus(AudioProcessing* apm,
+ AudioDeviceModule* adm,
+ bool enable) {
+ RTC_DCHECK(apm);
+ RTC_DCHECK(adm);
+#if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID)
+ GainControl::Mode agc_mode = GainControl::kFixedDigital;
+#else
+ GainControl::Mode agc_mode = GainControl::kAdaptiveAnalog;
+#endif
+ GainControl* gc = apm->gain_control();
+ if (gc->set_mode(agc_mode) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set AGC mode: " << agc_mode;
+ return;
+ }
+ if (gc->Enable(enable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable AGC: " << enable;
+ return;
+ }
+ // Set AGC state in the ADM when adaptive AGC mode has been selected.
+ if (adm->SetAGC(enable && agc_mode == GainControl::kAdaptiveAnalog) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set AGC mode in ADM: " << enable;
+ return;
+ }
+ RTC_LOG(LS_INFO) << "AGC set to " << enable << " with mode " << agc_mode;
+}
+
+void SetEcStatus(AudioProcessing* apm,
+ bool enable,
+ EcModes mode) {
+ RTC_DCHECK(apm);
+ RTC_DCHECK(mode == kEcConference || mode == kEcAecm) << "mode: " << mode;
+ EchoCancellation* ec = apm->echo_cancellation();
+ EchoControlMobile* ecm = apm->echo_control_mobile();
+ if (mode == kEcConference) {
+ // Disable the AECM before enabling the AEC.
+ if (enable && ecm->is_enabled() && ecm->Enable(false) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to disable AECM.";
+ return;
+ }
+ if (ec->Enable(enable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable AEC: " << enable;
+ return;
+ }
+ if (ec->set_suppression_level(EchoCancellation::kHighSuppression)
+ != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set high AEC aggressiveness.";
+ return;
+ }
+ } else {
+ // Disable the AEC before enabling the AECM.
+ if (enable && ec->is_enabled() && ec->Enable(false) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to disable AEC.";
+ return;
+ }
+ if (ecm->Enable(enable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable AECM: " << enable;
+ return;
+ }
+ }
+ RTC_LOG(LS_INFO) << "Echo control set to " << enable << " with mode " << mode;
+}
+
+void SetEcMetricsStatus(AudioProcessing* apm, bool enable) {
+ RTC_DCHECK(apm);
+ if ((apm->echo_cancellation()->enable_metrics(enable) != 0) ||
+ (apm->echo_cancellation()->enable_delay_logging(enable) != 0)) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable EC metrics: " << enable;
+ return;
+ }
+ RTC_LOG(LS_INFO) << "EC metrics set to " << enable;
+}
+
+void SetAecmMode(AudioProcessing* apm, bool enable) {
+ RTC_DCHECK(apm);
+ EchoControlMobile* ecm = apm->echo_control_mobile();
+ RTC_DCHECK_EQ(EchoControlMobile::kSpeakerphone, ecm->routing_mode());
+ if (ecm->enable_comfort_noise(enable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable CNG: " << enable;
+ return;
+ }
+ RTC_LOG(LS_INFO) << "CNG set to " << enable;
+}
+
+void SetNsStatus(AudioProcessing* apm, bool enable) {
+ RTC_DCHECK(apm);
+ NoiseSuppression* ns = apm->noise_suppression();
+ if (ns->set_level(NoiseSuppression::kHigh) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to set high NS level.";
+ return;
+ }
+ if (ns->Enable(enable) != 0) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable NS: " << enable;
+ return;
+ }
+ RTC_LOG(LS_INFO) << "NS set to " << enable;
+}
+
+void SetTypingDetectionStatus(AudioProcessing* apm, bool enable) {
+ RTC_DCHECK(apm);
+#if WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+ // Typing detection takes place in TransmitMixer::PrepareDemux() and
+ // TransmitMixer::TypingDetection(). The typing detection algorithm takes as
+ // input two booleans:
+ // 1. A signal whether a key was pressed during the audio frame.
+ // 2. Whether VAD is active or not.
+ // TransmitMixer will not even call the detector if APM has set kVadUnknown in
+ // the audio frame after near end processing, so enabling/disabling VAD is
+ // sufficient for turning typing detection on/off.
+ // TODO(solenberg): Rather than relying on a side effect, consider forcing the
+ // feature on/off in TransmitMixer.
+ VoiceDetection* vd = apm->voice_detection();
+ if (vd->Enable(enable)) {
+ RTC_LOG(LS_ERROR) << "Failed to enable/disable VAD: " << enable;
+ return;
+ }
+ if (vd->set_likelihood(VoiceDetection::kVeryLowLikelihood)) {
+ RTC_LOG(LS_ERROR) << "Failed to set low VAD likelihood.";
+ return;
+ }
+ RTC_LOG(LS_INFO) << "VAD set to " << enable << " for typing detection.";
+#endif
+}
+} // namespace apm_helpers
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/apm_helpers.h b/third_party/libwebrtc/webrtc/media/engine/apm_helpers.h
new file mode 100644
index 0000000000..caa485f477
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/apm_helpers.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_APM_HELPERS_H_
+#define MEDIA_ENGINE_APM_HELPERS_H_
+
+#include <cstdint>
+
+namespace webrtc {
+
+class AudioProcessing;
+class AudioDeviceModule;
+
+enum EcModes {
+ kEcConference, // Conferencing default (aggressive AEC).
+ kEcAecm, // AEC mobile.
+};
+
+struct AgcConfig {
+ uint16_t targetLeveldBOv;
+ uint16_t digitalCompressionGaindB;
+ bool limiterEnable;
+};
+
+namespace apm_helpers {
+
+void Init(AudioProcessing* apm);
+AgcConfig GetAgcConfig(AudioProcessing* apm);
+void SetAgcConfig(AudioProcessing* apm,
+ const AgcConfig& config);
+void SetAgcStatus(AudioProcessing* apm,
+ AudioDeviceModule* adm,
+ bool enable);
+void SetEcStatus(AudioProcessing* apm,
+ bool enable,
+ EcModes mode);
+void SetEcMetricsStatus(AudioProcessing* apm, bool enable);
+void SetAecmMode(AudioProcessing* apm, bool enable_cng);
+void SetNsStatus(AudioProcessing* apm, bool enable);
+void SetTypingDetectionStatus(AudioProcessing* apm, bool enable);
+
+} // namespace apm_helpers
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_APM_HELPERS_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/apm_helpers_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/apm_helpers_unittest.cc
new file mode 100644
index 0000000000..c20eb6faff
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/apm_helpers_unittest.cc
@@ -0,0 +1,287 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/apm_helpers.h"
+
+#include "media/engine/webrtcvoe.h"
+#include "modules/audio_device/include/mock_audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/mock_audio_decoder_factory.h"
+#include "voice_engine/transmit_mixer.h"
+
+namespace webrtc {
+namespace {
+
+constexpr AgcConfig kDefaultAgcConfig = { 3, 9, true };
+
+struct TestHelper {
+ TestHelper() {
+ // This replicates the conditions from voe_auto_test.
+ Config config;
+ config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
+ apm_ = rtc::scoped_refptr<AudioProcessing>(AudioProcessing::Create(config));
+ apm_helpers::Init(apm());
+ EXPECT_EQ(0, voe_wrapper_.base()->Init(
+ &mock_audio_device_, apm_,
+ MockAudioDecoderFactory::CreateEmptyFactory()));
+ }
+
+ AudioProcessing* apm() { return apm_.get(); }
+
+ const AudioProcessing* apm() const { return apm_.get(); }
+
+ test::MockAudioDeviceModule* adm() {
+ return &mock_audio_device_;
+ }
+
+ voe::TransmitMixer* transmit_mixer() {
+ return voe_wrapper_.base()->transmit_mixer();
+ }
+
+ bool GetEcMetricsStatus() const {
+ EchoCancellation* ec = apm()->echo_cancellation();
+ bool metrics_enabled = ec->are_metrics_enabled();
+ EXPECT_EQ(metrics_enabled, ec->is_delay_logging_enabled());
+ return metrics_enabled;
+ }
+
+ bool CanGetEcMetrics() const {
+ EchoCancellation* ec = apm()->echo_cancellation();
+ EchoCancellation::Metrics metrics;
+ int metrics_result = ec->GetMetrics(&metrics);
+ int median = 0;
+ int std = 0;
+ float fraction = 0;
+ int delay_metrics_result = ec->GetDelayMetrics(&median, &std, &fraction);
+ return metrics_result == AudioProcessing::kNoError &&
+ delay_metrics_result == AudioProcessing::kNoError;
+ }
+
+ private:
+ testing::NiceMock<test::MockAudioDeviceModule> mock_audio_device_;
+ cricket::VoEWrapper voe_wrapper_;
+ rtc::scoped_refptr<AudioProcessing> apm_;
+};
+} // namespace
+
+TEST(ApmHelpersTest, AgcConfig_DefaultConfiguration) {
+ TestHelper helper;
+ AgcConfig agc_config =
+ apm_helpers::GetAgcConfig(helper.apm());
+
+ EXPECT_EQ(kDefaultAgcConfig.targetLeveldBOv, agc_config.targetLeveldBOv);
+ EXPECT_EQ(kDefaultAgcConfig.digitalCompressionGaindB,
+ agc_config.digitalCompressionGaindB);
+ EXPECT_EQ(kDefaultAgcConfig.limiterEnable, agc_config.limiterEnable);
+}
+
+TEST(ApmHelpersTest, AgcConfig_GetAndSet) {
+ const AgcConfig agc_config = { 11, 17, false };
+
+ TestHelper helper;
+ apm_helpers::SetAgcConfig(helper.apm(), agc_config);
+ AgcConfig actual_config =
+ apm_helpers::GetAgcConfig(helper.apm());
+
+ EXPECT_EQ(agc_config.digitalCompressionGaindB,
+ actual_config.digitalCompressionGaindB);
+ EXPECT_EQ(agc_config.limiterEnable,
+ actual_config.limiterEnable);
+ EXPECT_EQ(agc_config.targetLeveldBOv,
+ actual_config.targetLeveldBOv);
+}
+
+TEST(ApmHelpersTest, AgcStatus_DefaultMode) {
+ TestHelper helper;
+ GainControl* gc = helper.apm()->gain_control();
+ EXPECT_FALSE(gc->is_enabled());
+#if defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR
+ EXPECT_EQ(GainControl::kAdaptiveAnalog, gc->mode());
+#elif defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID)
+ EXPECT_EQ(GainControl::kFixedDigital, gc->mode());
+#else
+ EXPECT_EQ(GainControl::kAdaptiveAnalog, gc->mode());
+#endif
+}
+
+TEST(ApmHelpersTest, AgcStatus_EnableDisable) {
+ TestHelper helper;
+ GainControl* gc = helper.apm()->gain_control();
+#if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID)
+ apm_helpers::SetAgcStatus(helper.apm(), helper.adm(), false);
+ EXPECT_FALSE(gc->is_enabled());
+ EXPECT_EQ(GainControl::kFixedDigital, gc->mode());
+
+ apm_helpers::SetAgcStatus(helper.apm(), helper.adm(), true);
+ EXPECT_TRUE(gc->is_enabled());
+ EXPECT_EQ(GainControl::kFixedDigital, gc->mode());
+#else
+ EXPECT_CALL(*helper.adm(), SetAGC(false)).WillOnce(testing::Return(0));
+ apm_helpers::SetAgcStatus(helper.apm(), helper.adm(), false);
+ EXPECT_FALSE(gc->is_enabled());
+ EXPECT_EQ(GainControl::kAdaptiveAnalog, gc->mode());
+
+ EXPECT_CALL(*helper.adm(), SetAGC(true)).WillOnce(testing::Return(0));
+ apm_helpers::SetAgcStatus(helper.apm(), helper.adm(), true);
+ EXPECT_TRUE(gc->is_enabled());
+ EXPECT_EQ(GainControl::kAdaptiveAnalog, gc->mode());
+#endif
+}
+
+TEST(ApmHelpersTest, EcStatus_DefaultMode) {
+ TestHelper helper;
+ EchoCancellation* ec = helper.apm()->echo_cancellation();
+ EchoControlMobile* ecm = helper.apm()->echo_control_mobile();
+ EXPECT_FALSE(ec->is_enabled());
+ EXPECT_FALSE(ecm->is_enabled());
+}
+
+TEST(ApmHelpersTest, EcStatus_EnableDisable) {
+ TestHelper helper;
+ EchoCancellation* ec = helper.apm()->echo_cancellation();
+ EchoControlMobile* ecm = helper.apm()->echo_control_mobile();
+
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcAecm);
+ EXPECT_FALSE(ec->is_enabled());
+ EXPECT_TRUE(ecm->is_enabled());
+
+ apm_helpers::SetEcStatus(helper.apm(), false, kEcAecm);
+ EXPECT_FALSE(ec->is_enabled());
+ EXPECT_FALSE(ecm->is_enabled());
+
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcConference);
+ EXPECT_TRUE(ec->is_enabled());
+ EXPECT_FALSE(ecm->is_enabled());
+ EXPECT_EQ(EchoCancellation::kHighSuppression, ec->suppression_level());
+
+ apm_helpers::SetEcStatus(helper.apm(), false, kEcConference);
+ EXPECT_FALSE(ec->is_enabled());
+ EXPECT_FALSE(ecm->is_enabled());
+ EXPECT_EQ(EchoCancellation::kHighSuppression, ec->suppression_level());
+
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcAecm);
+ EXPECT_FALSE(ec->is_enabled());
+ EXPECT_TRUE(ecm->is_enabled());
+}
+
+TEST(ApmHelpersTest, EcMetrics_DefaultMode) {
+ TestHelper helper;
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcConference);
+ EXPECT_TRUE(helper.GetEcMetricsStatus());
+}
+
+TEST(ApmHelpersTest, EcMetrics_CanEnableDisable) {
+ TestHelper helper;
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcConference);
+
+ apm_helpers::SetEcMetricsStatus(helper.apm(), true);
+ EXPECT_TRUE(helper.GetEcMetricsStatus());
+ apm_helpers::SetEcMetricsStatus(helper.apm(), false);
+ EXPECT_FALSE(helper.GetEcMetricsStatus());
+}
+
+TEST(ApmHelpersTest, EcMetrics_NoStatsUnlessEcMetricsAndEcEnabled) {
+ TestHelper helper;
+ EXPECT_FALSE(helper.CanGetEcMetrics());
+
+ apm_helpers::SetEcMetricsStatus(helper.apm(), true);
+ EXPECT_FALSE(helper.CanGetEcMetrics());
+
+ apm_helpers::SetEcStatus(helper.apm(), true, kEcConference);
+ EXPECT_TRUE(helper.CanGetEcMetrics());
+
+ apm_helpers::SetEcMetricsStatus(helper.apm(), false);
+ EXPECT_FALSE(helper.CanGetEcMetrics());
+}
+
+TEST(ApmHelpersTest, AecmMode_DefaultMode) {
+ TestHelper helper;
+ EchoControlMobile* ecm = helper.apm()->echo_control_mobile();
+ EXPECT_EQ(EchoControlMobile::kSpeakerphone, ecm->routing_mode());
+ EXPECT_TRUE(ecm->is_comfort_noise_enabled());
+}
+
+TEST(ApmHelpersTest, AecmMode_EnableDisableCng) {
+ TestHelper helper;
+ EchoControlMobile* ecm = helper.apm()->echo_control_mobile();
+ apm_helpers::SetAecmMode(helper.apm(), false);
+ EXPECT_FALSE(ecm->is_comfort_noise_enabled());
+ apm_helpers::SetAecmMode(helper.apm(), true);
+ EXPECT_TRUE(ecm->is_comfort_noise_enabled());
+}
+
+TEST(ApmHelpersTest, NsStatus_DefaultMode) {
+ TestHelper helper;
+ NoiseSuppression* ns = helper.apm()->noise_suppression();
+ EXPECT_EQ(NoiseSuppression::kModerate, ns->level());
+ EXPECT_FALSE(ns->is_enabled());
+}
+
+TEST(ApmHelpersTest, NsStatus_EnableDisable) {
+ TestHelper helper;
+ NoiseSuppression* ns = helper.apm()->noise_suppression();
+ apm_helpers::SetNsStatus(helper.apm(), true);
+ EXPECT_EQ(NoiseSuppression::kHigh, ns->level());
+ EXPECT_TRUE(ns->is_enabled());
+ apm_helpers::SetNsStatus(helper.apm(), false);
+ EXPECT_EQ(NoiseSuppression::kHigh, ns->level());
+ EXPECT_FALSE(ns->is_enabled());
+}
+
+TEST(ApmHelpersTest, TypingDetectionStatus_DefaultMode) {
+ TestHelper helper;
+ VoiceDetection* vd = helper.apm()->voice_detection();
+ EXPECT_FALSE(vd->is_enabled());
+}
+
+// TODO(kthelgason): Reenable this test on simulator.
+// See bugs.webrtc.org/5569
+#if defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR
+#define MAYBE_TypingDetectionStatus_EnableDisable \
+ DISABLED_TypingDetectionStatus_EnableDisable
+#else
+#define MAYBE_TypingDetectionStatus_EnableDisable \
+ TypingDetectionStatus_EnableDisable
+#endif
+TEST(ApmHelpersTest, MAYBE_TypingDetectionStatus_EnableDisable) {
+ TestHelper helper;
+ VoiceDetection* vd = helper.apm()->voice_detection();
+ apm_helpers::SetTypingDetectionStatus(helper.apm(), true);
+ EXPECT_TRUE(vd->is_enabled());
+ apm_helpers::SetTypingDetectionStatus(helper.apm(), false);
+ EXPECT_FALSE(vd->is_enabled());
+}
+
+// TODO(solenberg): Move this test to a better place - added here for the sake
+// of duplicating all relevant tests from audio_processing_test.cc.
+TEST(ApmHelpersTest, HighPassFilter_DefaultMode) {
+ TestHelper helper;
+ EXPECT_FALSE(helper.apm()->high_pass_filter()->is_enabled());
+}
+
+// TODO(solenberg): Move this test to a better place - added here for the sake
+// of duplicating all relevant tests from audio_processing_test.cc.
+TEST(ApmHelpersTest, StereoSwapping_DefaultMode) {
+ TestHelper helper;
+ EXPECT_FALSE(helper.transmit_mixer()->IsStereoChannelSwappingEnabled());
+}
+
+// TODO(solenberg): Move this test to a better place - added here for the sake
+// of duplicating all relevant tests from audio_processing_test.cc.
+TEST(ApmHelpersTest, StereoSwapping_EnableDisable) {
+ TestHelper helper;
+ helper.transmit_mixer()->EnableStereoChannelSwapping(true);
+ EXPECT_TRUE(helper.transmit_mixer()->IsStereoChannelSwappingEnabled());
+ helper.transmit_mixer()->EnableStereoChannelSwapping(false);
+ EXPECT_FALSE(helper.transmit_mixer()->IsStereoChannelSwappingEnabled());
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/constants.cc b/third_party/libwebrtc/webrtc/media/engine/constants.cc
new file mode 100644
index 0000000000..736685fc79
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/constants.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/constants.h"
+
+namespace cricket {
+
+const int kMinVideoBitrateBps = 30000;
+const int kVideoMtu = 1200;
+const int kVideoRtpBufferSize = 65536;
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/constants.h b/third_party/libwebrtc/webrtc/media/engine/constants.h
new file mode 100644
index 0000000000..ea6c0754a6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/constants.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_CONSTANTS_H_
+#define MEDIA_ENGINE_CONSTANTS_H_
+
+namespace cricket {
+
+extern const int kVideoMtu;
+extern const int kVideoRtpBufferSize;
+
+extern const char kH264CodecName[];
+
+extern const int kMinVideoBitrateBps;
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_CONSTANTS_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.cc b/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.cc
new file mode 100644
index 0000000000..1f43f5a4a1
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/convert_legacy_video_factory.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/base/h264_profile_level_id.h"
+#include "media/engine/encoder_simulcast_proxy.h"
+#include "media/engine/internaldecoderfactory.h"
+#include "media/engine/internalencoderfactory.h"
+#include "media/engine/scopedvideodecoder.h"
+#include "media/engine/scopedvideoencoder.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "media/engine/videodecodersoftwarefallbackwrapper.h"
+#include "media/engine/videoencodersoftwarefallbackwrapper.h"
+#include "media/engine/webrtcvideodecoderfactory.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ptr_util.h"
+
+namespace cricket {
+
+namespace {
+
+bool IsFormatSupported(
+ const std::vector<webrtc::SdpVideoFormat>& supported_formats,
+ const webrtc::SdpVideoFormat& format) {
+ for (const webrtc::SdpVideoFormat& supported_format : supported_formats) {
+ if (IsSameCodec(format.name, format.parameters, supported_format.name,
+ supported_format.parameters)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Converts the cricket::WebRtcVideoEncoderFactory to a
+// webrtc::VideoEncoderFactory (without adding any simulcast or SW fallback).
+class CricketToWebRtcEncoderFactory : public webrtc::VideoEncoderFactory {
+ public:
+ explicit CricketToWebRtcEncoderFactory(
+ std::unique_ptr<WebRtcVideoEncoderFactory> external_encoder_factory)
+ : external_encoder_factory_(std::move(external_encoder_factory)) {}
+
+ webrtc::VideoEncoderFactory::CodecInfo QueryVideoEncoder(
+ const webrtc::SdpVideoFormat& format) const override {
+ CodecInfo info;
+ info.has_internal_source = false;
+ info.is_hardware_accelerated = false;
+ if (!external_encoder_factory_) return info;
+
+ info.has_internal_source =
+ external_encoder_factory_->EncoderTypeHasInternalSource(
+ webrtc::PayloadStringToCodecType(format.name));
+ info.is_hardware_accelerated = true;
+ return info;
+ }
+
+ std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+ if (!external_encoder_factory_)
+ return std::vector<webrtc::SdpVideoFormat>();
+
+ std::vector<webrtc::SdpVideoFormat> formats;
+ for (const VideoCodec& codec :
+ external_encoder_factory_->supported_codecs()) {
+ formats.push_back(webrtc::SdpVideoFormat(codec.name, codec.params));
+ }
+ return formats;
+ }
+
+ std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+ const webrtc::SdpVideoFormat& format) override {
+ return CreateScopedVideoEncoder(external_encoder_factory_.get(),
+ VideoCodec(format));
+ }
+
+ private:
+ const std::unique_ptr<WebRtcVideoEncoderFactory> external_encoder_factory_;
+};
+
+// This class combines an external factory with the internal factory and adds
+// internal SW codecs, simulcast, and SW fallback wrappers.
+class EncoderAdapter : public webrtc::VideoEncoderFactory {
+ public:
+ explicit EncoderAdapter(
+ std::unique_ptr<WebRtcVideoEncoderFactory> external_encoder_factory)
+ : internal_encoder_factory_(new webrtc::InternalEncoderFactory()),
+ external_encoder_factory_(
+ rtc::MakeUnique<CricketToWebRtcEncoderFactory>(
+ std::move(external_encoder_factory))) {}
+
+ webrtc::VideoEncoderFactory::CodecInfo QueryVideoEncoder(
+ const webrtc::SdpVideoFormat& format) const override {
+ if (IsFormatSupported(external_encoder_factory_->GetSupportedFormats(),
+ format)) {
+ return external_encoder_factory_->QueryVideoEncoder(format);
+ }
+
+ // Format must be one of the internal formats.
+ RTC_DCHECK(IsFormatSupported(
+ internal_encoder_factory_->GetSupportedFormats(), format));
+ webrtc::VideoEncoderFactory::CodecInfo info;
+ info.has_internal_source = false;
+ info.is_hardware_accelerated = false;
+ return info;
+ }
+
+ std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+ const webrtc::SdpVideoFormat& format) override {
+ // Try creating internal encoder.
+ std::unique_ptr<webrtc::VideoEncoder> internal_encoder;
+ if (IsFormatSupported(internal_encoder_factory_->GetSupportedFormats(),
+ format)) {
+ internal_encoder = rtc::MakeUnique<webrtc::EncoderSimulcastProxy>(
+ internal_encoder_factory_.get(), format);
+ }
+
+ // Try creating external encoder.
+ std::unique_ptr<webrtc::VideoEncoder> external_encoder;
+ if (IsFormatSupported(external_encoder_factory_->GetSupportedFormats(),
+ format)) {
+      external_encoder = rtc::MakeUnique<webrtc::EncoderSimulcastProxy>(
+          external_encoder_factory_.get(), format);
+ }
+
+ if (internal_encoder && external_encoder) {
+ // Both internal SW encoder and external HW encoder available - create
+ // fallback encoder.
+ return rtc::MakeUnique<webrtc::VideoEncoderSoftwareFallbackWrapper>(
+ std::move(internal_encoder), std::move(external_encoder));
+ }
+ return external_encoder ? std::move(external_encoder)
+ : std::move(internal_encoder);
+ }
+
+ std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+ std::vector<webrtc::SdpVideoFormat> formats =
+ internal_encoder_factory_->GetSupportedFormats();
+
+ // Add external codecs.
+ for (const webrtc::SdpVideoFormat& format :
+ external_encoder_factory_->GetSupportedFormats()) {
+ // Don't add same codec twice.
+ if (!IsFormatSupported(formats, format)) formats.push_back(format);
+ }
+
+ return formats;
+ }
+
+ private:
+ const std::unique_ptr<webrtc::VideoEncoderFactory> internal_encoder_factory_;
+ const std::unique_ptr<webrtc::VideoEncoderFactory> external_encoder_factory_;
+};
+
+// This class combines an external factory with the internal factory and adds
+// internal SW codecs, simulcast, and SW fallback wrappers.
+class DecoderAdapter : public webrtc::VideoDecoderFactory {
+ public:
+ explicit DecoderAdapter(
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory)
+ : external_decoder_factory_(std::move(external_decoder_factory)) {}
+
+ std::unique_ptr<webrtc::VideoDecoder> CreateVideoDecoder(
+ const webrtc::SdpVideoFormat& format) override {
+ std::unique_ptr<webrtc::VideoDecoder> internal_decoder;
+ webrtc::InternalDecoderFactory internal_decoder_factory;
+ if (IsFormatSupported(internal_decoder_factory.GetSupportedFormats(),
+ format)) {
+ internal_decoder = internal_decoder_factory.CreateVideoDecoder(format);
+ }
+
+ const VideoCodec codec(format);
+ const VideoDecoderParams params = {};
+ if (external_decoder_factory_ != nullptr) {
+ std::unique_ptr<webrtc::VideoDecoder> external_decoder =
+ CreateScopedVideoDecoder(external_decoder_factory_.get(), codec,
+ params);
+ if (external_decoder) {
+ if (!internal_decoder) return external_decoder;
+ // Both external and internal decoder available - create fallback
+ // wrapper.
+ return std::unique_ptr<webrtc::VideoDecoder>(
+ new webrtc::VideoDecoderSoftwareFallbackWrapper(
+ std::move(internal_decoder), std::move(external_decoder)));
+ }
+ }
+
+ return internal_decoder;
+ }
+
+ std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+ // This is not implemented for the legacy decoder factory.
+ RTC_NOTREACHED();
+ return std::vector<webrtc::SdpVideoFormat>();
+ }
+
+ private:
+ const std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory_;
+};
+
+} // namespace
+
+std::unique_ptr<webrtc::VideoEncoderFactory> ConvertVideoEncoderFactory(
+ std::unique_ptr<WebRtcVideoEncoderFactory> external_encoder_factory) {
+ return std::unique_ptr<webrtc::VideoEncoderFactory>(
+ new EncoderAdapter(std::move(external_encoder_factory)));
+}
+
+std::unique_ptr<webrtc::VideoDecoderFactory> ConvertVideoDecoderFactory(
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory) {
+ return std::unique_ptr<webrtc::VideoDecoderFactory>(
+ new DecoderAdapter(std::move(external_decoder_factory)));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.h b/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.h
new file mode 100644
index 0000000000..5bd3580a6f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_CONVERT_LEGACY_VIDEO_FACTORY_H_
+#define MEDIA_ENGINE_CONVERT_LEGACY_VIDEO_FACTORY_H_
+
+#include <memory>
+
+namespace webrtc {
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+} // namespace webrtc
+
+namespace cricket {
+
+class WebRtcVideoEncoderFactory;
+class WebRtcVideoDecoderFactory;
+
+// Adds internal SW codecs, simulcast, SW fallback wrappers, and converts to the
+// new type of codec factories. The purpose of these functions is to provide an
+// easy way for clients to migrate to the API with new factory types.
+// TODO(magjed): Remove once old factories are gone, webrtc:7925.
+std::unique_ptr<webrtc::VideoEncoderFactory> ConvertVideoEncoderFactory(
+ std::unique_ptr<WebRtcVideoEncoderFactory> external_encoder_factory);
+
+std::unique_ptr<webrtc::VideoDecoderFactory> ConvertVideoDecoderFactory(
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory);
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_CONVERT_LEGACY_VIDEO_FACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.cc b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.cc
new file mode 100644
index 0000000000..84ffed2577
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/encoder_simulcast_proxy.h"
+
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "modules/video_coding/include/video_error_codes.h"
+
+namespace webrtc {
+EncoderSimulcastProxy::EncoderSimulcastProxy(VideoEncoderFactory* factory,
+ const SdpVideoFormat& format)
+ : factory_(factory), video_format_(format), callback_(nullptr) {
+ encoder_ = factory_->CreateVideoEncoder(format);
+}
+
+EncoderSimulcastProxy::EncoderSimulcastProxy(VideoEncoderFactory* factory)
+ : EncoderSimulcastProxy(factory, SdpVideoFormat("VP8")) {}
+
+EncoderSimulcastProxy::~EncoderSimulcastProxy() {}
+
+int EncoderSimulcastProxy::Release() { return encoder_->Release(); }
+
+int EncoderSimulcastProxy::InitEncode(const VideoCodec* inst,
+ int number_of_cores,
+ size_t max_payload_size) {
+ int ret = encoder_->InitEncode(inst, number_of_cores, max_payload_size);
+ if (ret == WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED) {
+ encoder_.reset(new SimulcastEncoderAdapter(factory_));
+ if (callback_) {
+ encoder_->RegisterEncodeCompleteCallback(callback_);
+ }
+ ret = encoder_->InitEncode(inst, number_of_cores, max_payload_size);
+ }
+ return ret;
+}
+
+int EncoderSimulcastProxy::Encode(const VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<FrameType>* frame_types) {
+ return encoder_->Encode(input_image, codec_specific_info, frame_types);
+}
+
+int EncoderSimulcastProxy::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ callback_ = callback;
+ return encoder_->RegisterEncodeCompleteCallback(callback);
+}
+
+int EncoderSimulcastProxy::SetRateAllocation(const BitrateAllocation& bitrate,
+ uint32_t new_framerate) {
+ return encoder_->SetRateAllocation(bitrate, new_framerate);
+}
+
+int EncoderSimulcastProxy::SetChannelParameters(uint32_t packet_loss,
+ int64_t rtt) {
+ return encoder_->SetChannelParameters(packet_loss, rtt);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.h b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.h
new file mode 100644
index 0000000000..b7c13d7239
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef MEDIA_ENGINE_ENCODER_SIMULCAST_PROXY_H_
+#define MEDIA_ENGINE_ENCODER_SIMULCAST_PROXY_H_
+
+#include <stddef.h>
+#include <stdint.h>
+#include <memory>
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "common_types.h" // NOLINT(build/include)
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+
+// This class provides fallback to SimulcastEncoderAdapter if default VP8Encoder
+// doesn't support simulcast for provided settings.
+class EncoderSimulcastProxy : public VideoEncoder {
+ public:
+ EncoderSimulcastProxy(VideoEncoderFactory* factory,
+ const SdpVideoFormat& format);
+  // Deprecated. Remove once all clients use constructor with both factory and
+  // SdpVideoFormat.
+ explicit EncoderSimulcastProxy(VideoEncoderFactory* factory);
+
+ ~EncoderSimulcastProxy() override;
+
+ // Implements VideoEncoder.
+ int Release() override;
+ int InitEncode(const VideoCodec* inst, int number_of_cores,
+ size_t max_payload_size) override;
+ int Encode(const VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<FrameType>* frame_types) override;
+ int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
+ int SetRateAllocation(const BitrateAllocation& bitrate,
+ uint32_t new_framerate) override;
+ int SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
+
+ private:
+ VideoEncoderFactory* const factory_;
+ SdpVideoFormat video_format_;
+ std::unique_ptr<VideoEncoder> encoder_;
+ EncodedImageCallback* callback_;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_ENCODER_SIMULCAST_PROXY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy_unittest.cc
new file mode 100644
index 0000000000..d1a88762b5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy_unittest.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "media/engine/encoder_simulcast_proxy.h"
+#include <string>
+
+#include "api/test/mock_video_encoder_factory.h"
+#include "api/video_codecs/vp8_temporal_layers.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/video_codec_settings.h"
+
+namespace webrtc {
+namespace testing {
+
+using ::testing::_;
+using ::testing::NiceMock;
+using ::testing::Return;
+
+class MockEncoder : public VideoEncoder {
+ public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ MockEncoder() {}
+ virtual ~MockEncoder() {}
+
+ MOCK_METHOD3(InitEncode,
+ int32_t(const VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size));
+
+ MOCK_METHOD1(RegisterEncodeCompleteCallback, int32_t(EncodedImageCallback*));
+
+ MOCK_METHOD0(Release, int32_t());
+
+ MOCK_METHOD3(
+ Encode,
+ int32_t(const VideoFrame& inputImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<FrameType>* frame_types) /* override */);
+
+ MOCK_CONST_METHOD0(GetEncoderInfo, VideoEncoder::EncoderInfo(void));
+};
+
+TEST(EncoderSimulcastProxy, ChoosesCorrectImplementation) {
+ const std::string kImplementationName = "Fake";
+ const std::string kSimulcastAdaptedImplementationName =
+ "SimulcastEncoderAdapter (Fake, Fake, Fake)";
+ VideoCodec codec_settings;
+ webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
+ codec_settings.simulcastStream[0] = {test::kTestWidth,
+ test::kTestHeight,
+ test::kTestFrameRate,
+ 2,
+ 2000,
+ 1000,
+ 1000,
+ 56};
+ codec_settings.simulcastStream[1] = {test::kTestWidth,
+ test::kTestHeight,
+ test::kTestFrameRate,
+ 2,
+ 3000,
+ 1000,
+ 1000,
+ 56};
+ codec_settings.simulcastStream[2] = {test::kTestWidth,
+ test::kTestHeight,
+ test::kTestFrameRate,
+ 2,
+ 5000,
+ 1000,
+ 1000,
+ 56};
+ codec_settings.numberOfSimulcastStreams = 3;
+
+ NiceMock<MockEncoder>* mock_encoder = new NiceMock<MockEncoder>();
+ NiceMock<MockVideoEncoderFactory> simulcast_factory;
+
+ EXPECT_CALL(*mock_encoder, InitEncode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ VideoEncoder::EncoderInfo encoder_info;
+ encoder_info.implementation_name = kImplementationName;
+ EXPECT_CALL(*mock_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(encoder_info));
+
+ EXPECT_CALL(simulcast_factory, CreateVideoEncoderProxy(_))
+ .Times(1)
+ .WillOnce(Return(mock_encoder));
+
+ EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
+ SdpVideoFormat("VP8"));
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ simulcast_enabled_proxy.InitEncode(&codec_settings, 4, 1200));
+ EXPECT_EQ(kImplementationName,
+ simulcast_enabled_proxy.GetEncoderInfo().implementation_name);
+
+ NiceMock<MockEncoder>* mock_encoder1 = new NiceMock<MockEncoder>();
+ NiceMock<MockEncoder>* mock_encoder2 = new NiceMock<MockEncoder>();
+ NiceMock<MockEncoder>* mock_encoder3 = new NiceMock<MockEncoder>();
+ NiceMock<MockEncoder>* mock_encoder4 = new NiceMock<MockEncoder>();
+ NiceMock<MockVideoEncoderFactory> nonsimulcast_factory;
+
+ EXPECT_CALL(*mock_encoder1, InitEncode(_, _, _))
+ .WillOnce(
+ Return(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED));
+ EXPECT_CALL(*mock_encoder1, GetEncoderInfo())
+ .WillRepeatedly(Return(encoder_info));
+
+ EXPECT_CALL(*mock_encoder2, InitEncode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*mock_encoder2, GetEncoderInfo())
+ .WillRepeatedly(Return(encoder_info));
+
+ EXPECT_CALL(*mock_encoder3, InitEncode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*mock_encoder3, GetEncoderInfo())
+ .WillRepeatedly(Return(encoder_info));
+
+ EXPECT_CALL(*mock_encoder4, InitEncode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*mock_encoder4, GetEncoderInfo())
+ .WillRepeatedly(Return(encoder_info));
+
+ EXPECT_CALL(nonsimulcast_factory, CreateVideoEncoderProxy(_))
+ .Times(4)
+ .WillOnce(Return(mock_encoder1))
+ .WillOnce(Return(mock_encoder2))
+ .WillOnce(Return(mock_encoder3))
+ .WillOnce(Return(mock_encoder4));
+
+ EncoderSimulcastProxy simulcast_disabled_proxy(&nonsimulcast_factory,
+ SdpVideoFormat("VP8"));
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ simulcast_disabled_proxy.InitEncode(&codec_settings, 4, 1200));
+ EXPECT_EQ(kSimulcastAdaptedImplementationName,
+ simulcast_disabled_proxy.GetEncoderInfo().implementation_name);
+
+ // Cleanup.
+ simulcast_enabled_proxy.Release();
+ simulcast_disabled_proxy.Release();
+}
+
+// Verifies that EncoderSimulcastProxy forwards the wrapped encoder's
+// |has_trusted_rate_controller| flag through GetEncoderInfo().
+TEST(EncoderSimulcastProxy, ForwardsTrustedSetting) {
+  NiceMock<MockEncoder>* mock_encoder = new NiceMock<MockEncoder>();
+  NiceMock<MockVideoEncoderFactory> simulcast_factory;
+
+  EXPECT_CALL(*mock_encoder, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+
+  // The factory hands ownership of the mock to the proxy.
+  EXPECT_CALL(simulcast_factory, CreateVideoEncoderProxy(_))
+      .Times(1)
+      .WillOnce(Return(mock_encoder));
+
+  EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
+                                                SdpVideoFormat("VP8"));
+  VideoCodec codec_settings;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            simulcast_enabled_proxy.InitEncode(&codec_settings, 4, 1200));
+
+  VideoEncoder::EncoderInfo info;
+  info.has_trusted_rate_controller = true;
+  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillRepeatedly(Return(info));
+
+  // The proxy must report exactly what the underlying encoder reports.
+  EXPECT_TRUE(
+      simulcast_enabled_proxy.GetEncoderInfo().has_trusted_rate_controller);
+}
+
+// Verifies that EncoderSimulcastProxy forwards |is_hardware_accelerated|
+// from the wrapped encoder, for both the false and the true value.
+TEST(EncoderSimulcastProxy, ForwardsHardwareAccelerated) {
+  NiceMock<MockEncoder>* mock_encoder = new NiceMock<MockEncoder>();
+  NiceMock<MockVideoEncoderFactory> simulcast_factory;
+
+  EXPECT_CALL(*mock_encoder, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+
+  // The factory hands ownership of the mock to the proxy.
+  EXPECT_CALL(simulcast_factory, CreateVideoEncoderProxy(_))
+      .Times(1)
+      .WillOnce(Return(mock_encoder));
+
+  EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
+                                                SdpVideoFormat("VP8"));
+  VideoCodec codec_settings;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            simulcast_enabled_proxy.InitEncode(&codec_settings, 4, 1200));
+
+  VideoEncoder::EncoderInfo info;
+
+  info.is_hardware_accelerated = false;
+  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
+  EXPECT_FALSE(
+      simulcast_enabled_proxy.GetEncoderInfo().is_hardware_accelerated);
+
+  info.is_hardware_accelerated = true;
+  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
+  EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().is_hardware_accelerated);
+}
+
+// Verifies that EncoderSimulcastProxy forwards |has_internal_source|
+// from the wrapped encoder, for both the false and the true value.
+TEST(EncoderSimulcastProxy, ForwardsInternalSource) {
+  NiceMock<MockEncoder>* mock_encoder = new NiceMock<MockEncoder>();
+  NiceMock<MockVideoEncoderFactory> simulcast_factory;
+
+  EXPECT_CALL(*mock_encoder, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+
+  // The factory hands ownership of the mock to the proxy.
+  EXPECT_CALL(simulcast_factory, CreateVideoEncoderProxy(_))
+      .Times(1)
+      .WillOnce(Return(mock_encoder));
+
+  EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
+                                                SdpVideoFormat("VP8"));
+  VideoCodec codec_settings;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            simulcast_enabled_proxy.InitEncode(&codec_settings, 4, 1200));
+
+  VideoEncoder::EncoderInfo info;
+
+  info.has_internal_source = false;
+  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
+  EXPECT_FALSE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
+
+  info.has_internal_source = true;
+  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
+  EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
+}
+
+} // namespace testing
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.cc b/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.cc
new file mode 100644
index 0000000000..3d95e511e6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.cc
@@ -0,0 +1,648 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/fakewebrtccall.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "api/call/audio_sink.h"
+#include "media/base/rtputils.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/platform_file.h"
+
+namespace cricket {
+// A valid VoE channel id is required even for the fake stream.
+FakeAudioSendStream::FakeAudioSendStream(
+    int id, const webrtc::AudioSendStream::Config& config)
+    : id_(id), config_(config) {
+  RTC_DCHECK(config.voe_channel_id != -1);
+}
+
+// Replaces the stored config; the fake performs no reconfiguration work.
+void FakeAudioSendStream::Reconfigure(
+    const webrtc::AudioSendStream::Config& config) {
+  config_ = config;
+}
+
+const webrtc::AudioSendStream::Config&
+    FakeAudioSendStream::GetConfig() const {
+  return config_;
+}
+
+// Injects the stats that subsequent GetStats() calls will return.
+void FakeAudioSendStream::SetStats(
+    const webrtc::AudioSendStream::Stats& stats) {
+  stats_ = stats;
+}
+
+FakeAudioSendStream::TelephoneEvent
+    FakeAudioSendStream::GetLatestTelephoneEvent() const {
+  return latest_telephone_event_;
+}
+
+// Records the event for later inspection and always reports success.
+bool FakeAudioSendStream::SendTelephoneEvent(int payload_type,
+                                             int payload_frequency, int event,
+                                             int duration_ms) {
+  latest_telephone_event_.payload_type = payload_type;
+  latest_telephone_event_.payload_frequency = payload_frequency;
+  latest_telephone_event_.event_code = event;
+  latest_telephone_event_.duration_ms = duration_ms;
+  return true;
+}
+
+void FakeAudioSendStream::SetMuted(bool muted) {
+  muted_ = muted;
+}
+
+// Both GetStats() overloads return the injected stats unchanged.
+webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats() const {
+  return stats_;
+}
+
+webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats(
+    bool /*has_remote_tracks*/) const {
+  return stats_;
+}
+
+// A valid VoE channel id is required even for the fake stream.
+FakeAudioReceiveStream::FakeAudioReceiveStream(
+    int id, const webrtc::AudioReceiveStream::Config& config)
+    : id_(id), config_(config) {
+  RTC_DCHECK(config.voe_channel_id != -1);
+}
+
+const webrtc::AudioReceiveStream::Config&
+    FakeAudioReceiveStream::GetConfig() const {
+  return config_;
+}
+
+// Injects the stats that subsequent GetStats() calls will return.
+void FakeAudioReceiveStream::SetStats(
+    const webrtc::AudioReceiveStream::Stats& stats) {
+  stats_ = stats;
+}
+
+// Compares the given bytes against the payload of the last delivered packet.
+bool FakeAudioReceiveStream::VerifyLastPacket(const uint8_t* data,
+                                              size_t length) const {
+  return last_packet_ == rtc::Buffer(data, length);
+}
+
+// Counts the packet and keeps a copy so tests can verify it later.
+bool FakeAudioReceiveStream::DeliverRtp(const uint8_t* packet,
+                                        size_t length,
+                                        const webrtc::PacketTime& packet_time) {
+  ++received_packets_;
+  last_packet_.SetData(packet, length);
+  return true;
+}
+
+webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats() const {
+  return stats_;
+}
+
+// Takes ownership of the sink; tests can query it via sink().
+void FakeAudioReceiveStream::SetSink(
+    std::unique_ptr<webrtc::AudioSinkInterface> sink) {
+  sink_ = std::move(sink);
+}
+
+void FakeAudioReceiveStream::SetGain(float gain) {
+  gain_ = gain;
+}
+
+// Stores the configs and derives the initial VideoStreams. Requires a
+// non-null encoder in the config.
+FakeVideoSendStream::FakeVideoSendStream(
+    webrtc::VideoSendStream::Config config,
+    webrtc::VideoEncoderConfig encoder_config)
+    : sending_(false),
+      config_(std::move(config)),
+      codec_settings_set_(false),
+      resolution_scaling_enabled_(false),
+      framerate_scaling_enabled_(false),
+      source_(nullptr),
+      num_swapped_frames_(0) {
+  // |config| was moved into |config_| in the initializer list above, so it
+  // must not be inspected here (use-after-move); validate the stored copy.
+  RTC_DCHECK(config_.encoder_settings.encoder != NULL);
+  ReconfigureVideoEncoder(std::move(encoder_config));
+}
+
+// Detach from the frame source so it stops delivering into a dead sink.
+FakeVideoSendStream::~FakeVideoSendStream() {
+  if (source_)
+    source_->RemoveSink(this);
+}
+
+const webrtc::VideoSendStream::Config& FakeVideoSendStream::GetConfig() const {
+  return config_;
+}
+
+const webrtc::VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig()
+    const {
+  return encoder_config_;
+}
+
+const std::vector<webrtc::VideoStream>& FakeVideoSendStream::GetVideoStreams()
+    const {
+  return video_streams_;
+}
+
+bool FakeVideoSendStream::IsSending() const {
+  return sending_;
+}
+
+// Copies out the VP8 settings captured during the last reconfiguration.
+// Returns false if no codec-specific settings have been set yet.
+bool FakeVideoSendStream::GetVp8Settings(
+    webrtc::VideoCodecVP8* settings) const {
+  if (!codec_settings_set_) {
+    return false;
+  }
+
+  *settings = vpx_settings_.vp8;
+  return true;
+}
+
+// Copies out the VP9 settings captured during the last reconfiguration.
+// Returns false if no codec-specific settings have been set yet.
+bool FakeVideoSendStream::GetVp9Settings(
+    webrtc::VideoCodecVP9* settings) const {
+  if (!codec_settings_set_) {
+    return false;
+  }
+
+  *settings = vpx_settings_.vp9;
+  return true;
+}
+
+int FakeVideoSendStream::GetNumberOfSwappedFrames() const {
+  return num_swapped_frames_;
+}
+
+// NOTE: the GetLast*() accessors require that at least one frame has been
+// delivered via OnFrame(); |last_frame_| is empty before that.
+int FakeVideoSendStream::GetLastWidth() const {
+  return last_frame_->width();
+}
+
+int FakeVideoSendStream::GetLastHeight() const {
+  return last_frame_->height();
+}
+
+int64_t FakeVideoSendStream::GetLastTimestamp() const {
+  RTC_DCHECK(last_frame_->ntp_time_ms() == 0);
+  return last_frame_->render_time_ms();
+}
+
+// Counts the frame and, when its size or rotation differs from the previous
+// frame, regenerates the encoder streams the way a real stream would.
+void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) {
+  ++num_swapped_frames_;
+  if (!last_frame_ ||
+      frame.width() != last_frame_->width() ||
+      frame.height() != last_frame_->height() ||
+      frame.rotation() != last_frame_->rotation()) {
+    video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams(
+        frame.width(), frame.height(), encoder_config_);
+  }
+  last_frame_ = frame;
+}
+
+// Injects the stats that subsequent GetStats() calls will return.
+void FakeVideoSendStream::SetStats(
+    const webrtc::VideoSendStream::Stats& stats) {
+  stats_ = stats;
+}
+
+webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() {
+  return stats_;
+}
+
+// No recording in the fake; just close the files so handles don't leak.
+void FakeVideoSendStream::EnableEncodedFrameRecording(
+    const std::vector<rtc::PlatformFile>& files,
+    size_t byte_limit) {
+  for (rtc::PlatformFile file : files)
+    rtc::ClosePlatformFile(file);
+}
+
+// Rebuilds the VideoStreams from the new config (using the last known frame
+// size, or 0x0 before any frame) and captures codec-specific VPx settings,
+// deriving the temporal layer count from the last stream's thresholds.
+void FakeVideoSendStream::ReconfigureVideoEncoder(
+    webrtc::VideoEncoderConfig config) {
+  int width, height;
+  if (last_frame_) {
+    width = last_frame_->width();
+    height = last_frame_->height();
+  } else {
+    width = height = 0;
+  }
+  video_streams_ = config.video_stream_factory->CreateEncoderStreams(
+      width, height, config);
+  if (config.encoder_specific_settings != NULL) {
+    if (config_.encoder_settings.payload_name == "VP8") {
+      config.encoder_specific_settings->FillVideoCodecVp8(&vpx_settings_.vp8);
+      if (!video_streams_.empty()) {
+        vpx_settings_.vp8.numberOfTemporalLayers = static_cast<unsigned char>(
+            video_streams_.back().temporal_layer_thresholds_bps.size() + 1);
+      }
+    } else if (config_.encoder_settings.payload_name == "VP9") {
+      config.encoder_specific_settings->FillVideoCodecVp9(&vpx_settings_.vp9);
+      if (!video_streams_.empty()) {
+        vpx_settings_.vp9.numberOfTemporalLayers = static_cast<unsigned char>(
+            video_streams_.back().temporal_layer_thresholds_bps.size() + 1);
+      }
+    } else {
+      ADD_FAILURE() << "Unsupported encoder payload: "
+                    << config_.encoder_settings.payload_name;
+    }
+  }
+  codec_settings_set_ = config.encoder_specific_settings != NULL;
+  encoder_config_ = std::move(config);
+  ++num_encoder_reconfigurations_;
+}
+
+void FakeVideoSendStream::Start() {
+  sending_ = true;
+}
+
+void FakeVideoSendStream::Stop() {
+  sending_ = false;
+}
+
+// Swaps the frame source and translates the degradation preference into the
+// resolution/framerate scaling flags tests inspect. Re-registering with the
++// same source is not allowed.
+void FakeVideoSendStream::SetSource(
+    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+    const webrtc::VideoSendStream::DegradationPreference&
+        degradation_preference) {
+  RTC_DCHECK(source != source_);
+  if (source_)
+    source_->RemoveSink(this);
+  source_ = source;
+  switch (degradation_preference) {
+    case DegradationPreference::kMaintainFramerate:
+      resolution_scaling_enabled_ = true;
+      framerate_scaling_enabled_ = false;
+      break;
+    case DegradationPreference::kMaintainResolution:
+      resolution_scaling_enabled_ = false;
+      framerate_scaling_enabled_ = true;
+      break;
+    case DegradationPreference::kBalanced:
+      resolution_scaling_enabled_ = true;
+      framerate_scaling_enabled_ = true;
+      break;
+    case DegradationPreference::kDegradationDisabled:
+      resolution_scaling_enabled_ = false;
+      framerate_scaling_enabled_ = false;
+      break;
+  }
+  if (source)
+    source->AddOrUpdateSink(this, resolution_scaling_enabled_
+                                      ? sink_wants_
+                                      : rtc::VideoSinkWants());
+}
+
+// Lets tests simulate sink wants changing (requires a source to be set).
+void FakeVideoSendStream::InjectVideoSinkWants(
+    const rtc::VideoSinkWants& wants) {
+  sink_wants_ = wants;
+  source_->AddOrUpdateSink(this, wants);
+}
+
+FakeVideoReceiveStream::FakeVideoReceiveStream(
+    webrtc::VideoReceiveStream::Config config)
+    : config_(std::move(config)), receiving_(false) {}
+
+const webrtc::VideoReceiveStream::Config& FakeVideoReceiveStream::GetConfig()
+    const {
+  return config_;
+}
+
+bool FakeVideoReceiveStream::IsReceiving() const {
+  return receiving_;
+}
+
+// Hands the frame straight to the configured renderer, as if it had been
+// received and decoded.
+void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame) {
+  config_.renderer->OnFrame(frame);
+}
+
+webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const {
+  return stats_;
+}
+
+void FakeVideoReceiveStream::Start() {
+  receiving_ = true;
+}
+
+void FakeVideoReceiveStream::Stop() {
+  receiving_ = false;
+}
+
+// Injects the stats that subsequent GetStats() calls will return.
+void FakeVideoReceiveStream::SetStats(
+    const webrtc::VideoReceiveStream::Stats& stats) {
+  stats_ = stats;
+}
+
+// No recording in the fake; just close the file so the handle doesn't leak.
+void FakeVideoReceiveStream::EnableEncodedFrameRecording(rtc::PlatformFile file,
+                                                         size_t byte_limit) {
+  rtc::ClosePlatformFile(file);
+}
+
+// Secondary sinks are intentionally ignored by the fake.
+void FakeVideoReceiveStream::AddSecondarySink(
+    webrtc::RtpPacketSinkInterface* sink) {}
+
+void FakeVideoReceiveStream::RemoveSecondarySink(
+    const webrtc::RtpPacketSinkInterface* sink) {}
+
+FakeFlexfecReceiveStream::FakeFlexfecReceiveStream(
+    const webrtc::FlexfecReceiveStream::Config& config)
+    : config_(config) {}
+
+const webrtc::FlexfecReceiveStream::Config&
+FakeFlexfecReceiveStream::GetConfig() const {
+  return config_;
+}
+
+// TODO(brandtr): Implement when the stats have been designed.
+webrtc::FlexfecReceiveStream::Stats FakeFlexfecReceiveStream::GetStats() const {
+  return webrtc::FlexfecReceiveStream::Stats();
+}
+
+// Packet delivery to the FlexFEC fake is not supported.
+void FakeFlexfecReceiveStream::OnRtpPacket(const webrtc::RtpPacketReceived&) {
+  RTC_NOTREACHED() << "Not implemented.";
+}
+
+FakeCall::FakeCall(const webrtc::Call::Config& config)
+    : config_(config),
+      audio_network_state_(webrtc::kNetworkUp),
+      video_network_state_(webrtc::kNetworkUp),
+      num_created_send_streams_(0),
+      num_created_receive_streams_(0),
+      audio_transport_overhead_(0),
+      video_transport_overhead_(0) {}
+
+// All streams must have been destroyed by the test before the call dies;
+// leftover streams indicate a leak in the code under test.
+FakeCall::~FakeCall() {
+  EXPECT_EQ(0u, video_send_streams_.size());
+  EXPECT_EQ(0u, audio_send_streams_.size());
+  EXPECT_EQ(0u, video_receive_streams_.size());
+  EXPECT_EQ(0u, audio_receive_streams_.size());
+}
+
+webrtc::Call::Config FakeCall::GetConfig() const {
+  return config_;
+}
+
+const std::vector<FakeVideoSendStream*>& FakeCall::GetVideoSendStreams() {
+  return video_send_streams_;
+}
+
+const std::vector<FakeVideoReceiveStream*>& FakeCall::GetVideoReceiveStreams() {
+  return video_receive_streams_;
+}
+
+const std::vector<FakeAudioSendStream*>& FakeCall::GetAudioSendStreams() {
+  return audio_send_streams_;
+}
+
+// Linear search by SSRC; returns nullptr when no stream matches.
+const FakeAudioSendStream* FakeCall::GetAudioSendStream(uint32_t ssrc) {
+  for (const auto* p : GetAudioSendStreams()) {
+    if (p->GetConfig().rtp.ssrc == ssrc) {
+      return p;
+    }
+  }
+  return nullptr;
+}
+
+const std::vector<FakeAudioReceiveStream*>& FakeCall::GetAudioReceiveStreams() {
+  return audio_receive_streams_;
+}
+
+// Linear search by remote SSRC; returns nullptr when no stream matches.
+const FakeAudioReceiveStream* FakeCall::GetAudioReceiveStream(uint32_t ssrc) {
+  for (const auto* p : GetAudioReceiveStreams()) {
+    if (p->GetConfig().rtp.remote_ssrc == ssrc) {
+      return p;
+    }
+  }
+  return nullptr;
+}
+
+const std::vector<FakeFlexfecReceiveStream*>&
+FakeCall::GetFlexfecReceiveStreams() {
+  return flexfec_receive_streams_;
+}
+
+// Only AUDIO and VIDEO have per-media network state; anything else is a
+// test error and is reported as kNetworkDown.
+webrtc::NetworkState FakeCall::GetNetworkState(webrtc::MediaType media) const {
+  switch (media) {
+    case webrtc::MediaType::AUDIO:
+      return audio_network_state_;
+    case webrtc::MediaType::VIDEO:
+      return video_network_state_;
+    case webrtc::MediaType::DATA:
+    case webrtc::MediaType::ANY:
+      ADD_FAILURE() << "GetNetworkState called with unknown parameter.";
+      return webrtc::kNetworkDown;
+  }
+  // Even though all the values for the enum class are listed above, the
+  // compiler will emit a warning as the method may be called with a value
+  // outside of the valid enum range, unless this case is also handled.
+  ADD_FAILURE() << "GetNetworkState called with unknown parameter.";
+  return webrtc::kNetworkDown;
+}
+
+// Each Create*Stream() allocates a fake, records it for later lookup, and
+// bumps the corresponding creation counter. Each Destroy*Stream() reports a
+// test failure if handed a stream this call did not create.
+webrtc::AudioSendStream* FakeCall::CreateAudioSendStream(
+    const webrtc::AudioSendStream::Config& config) {
+  FakeAudioSendStream* fake_stream = new FakeAudioSendStream(next_stream_id_++,
+                                                             config);
+  audio_send_streams_.push_back(fake_stream);
+  ++num_created_send_streams_;
+  return fake_stream;
+}
+
+void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
+  auto it = std::find(audio_send_streams_.begin(),
+                      audio_send_streams_.end(),
+                      static_cast<FakeAudioSendStream*>(send_stream));
+  if (it == audio_send_streams_.end()) {
+    ADD_FAILURE() << "DestroyAudioSendStream called with unknown parameter.";
+  } else {
+    delete *it;
+    audio_send_streams_.erase(it);
+  }
+}
+
+webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream(
+    const webrtc::AudioReceiveStream::Config& config) {
+  audio_receive_streams_.push_back(new FakeAudioReceiveStream(next_stream_id_++,
+                                                              config));
+  ++num_created_receive_streams_;
+  return audio_receive_streams_.back();
+}
+
+void FakeCall::DestroyAudioReceiveStream(
+    webrtc::AudioReceiveStream* receive_stream) {
+  auto it = std::find(audio_receive_streams_.begin(),
+                      audio_receive_streams_.end(),
+                      static_cast<FakeAudioReceiveStream*>(receive_stream));
+  if (it == audio_receive_streams_.end()) {
+    ADD_FAILURE() << "DestroyAudioReceiveStream called with unknown parameter.";
+  } else {
+    delete *it;
+    audio_receive_streams_.erase(it);
+  }
+}
+
+webrtc::VideoSendStream* FakeCall::CreateVideoSendStream(
+    webrtc::VideoSendStream::Config config,
+    webrtc::VideoEncoderConfig encoder_config) {
+  FakeVideoSendStream* fake_stream =
+      new FakeVideoSendStream(std::move(config), std::move(encoder_config));
+  video_send_streams_.push_back(fake_stream);
+  ++num_created_send_streams_;
+  return fake_stream;
+}
+
+void FakeCall::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
+  auto it = std::find(video_send_streams_.begin(),
+                      video_send_streams_.end(),
+                      static_cast<FakeVideoSendStream*>(send_stream));
+  if (it == video_send_streams_.end()) {
+    ADD_FAILURE() << "DestroyVideoSendStream called with unknown parameter.";
+  } else {
+    delete *it;
+    video_send_streams_.erase(it);
+  }
+}
+
+webrtc::VideoReceiveStream* FakeCall::CreateVideoReceiveStream(
+    webrtc::VideoReceiveStream::Config config) {
+  video_receive_streams_.push_back(
+      new FakeVideoReceiveStream(std::move(config)));
+  ++num_created_receive_streams_;
+  return video_receive_streams_.back();
+}
+
+void FakeCall::DestroyVideoReceiveStream(
+    webrtc::VideoReceiveStream* receive_stream) {
+  auto it = std::find(video_receive_streams_.begin(),
+                      video_receive_streams_.end(),
+                      static_cast<FakeVideoReceiveStream*>(receive_stream));
+  if (it == video_receive_streams_.end()) {
+    ADD_FAILURE() << "DestroyVideoReceiveStream called with unknown parameter.";
+  } else {
+    delete *it;
+    video_receive_streams_.erase(it);
+  }
+}
+
+webrtc::FlexfecReceiveStream* FakeCall::CreateFlexfecReceiveStream(
+    const webrtc::FlexfecReceiveStream::Config& config) {
+  FakeFlexfecReceiveStream* fake_stream = new FakeFlexfecReceiveStream(config);
+  flexfec_receive_streams_.push_back(fake_stream);
+  ++num_created_receive_streams_;
+  return fake_stream;
+}
+
+void FakeCall::DestroyFlexfecReceiveStream(
+    webrtc::FlexfecReceiveStream* receive_stream) {
+  auto it = std::find(flexfec_receive_streams_.begin(),
+                      flexfec_receive_streams_.end(),
+                      static_cast<FakeFlexfecReceiveStream*>(receive_stream));
+  if (it == flexfec_receive_streams_.end()) {
+    ADD_FAILURE()
+        << "DestroyFlexfecReceiveStream called with unknown parameter.";
+  } else {
+    delete *it;
+    flexfec_receive_streams_.erase(it);
+  }
+}
+
+// The FakeCall acts as its own packet receiver (see DeliverPacket below).
+webrtc::PacketReceiver* FakeCall::Receiver() {
+  return this;
+}
+
+// Routes an incoming RTP packet to the fake receive stream whose remote
+// SSRC matches. Video packets are only matched; audio packets are also
+// handed to the stream so it can record them for later verification.
+FakeCall::DeliveryStatus FakeCall::DeliverPacket(
+    webrtc::MediaType media_type,
+    const uint8_t* packet,
+    size_t length,
+    const webrtc::PacketTime& packet_time) {
+  EXPECT_GE(length, 12u);
+  RTC_DCHECK(media_type == webrtc::MediaType::AUDIO ||
+             media_type == webrtc::MediaType::VIDEO);
+
+  uint32_t ssrc;
+  if (!GetRtpSsrc(packet, length, &ssrc))
+    return DELIVERY_PACKET_ERROR;
+
+  if (media_type == webrtc::MediaType::VIDEO) {
+    for (FakeVideoReceiveStream* stream : video_receive_streams_) {
+      if (stream->GetConfig().rtp.remote_ssrc == ssrc)
+        return DELIVERY_OK;
+    }
+  } else if (media_type == webrtc::MediaType::AUDIO) {
+    for (FakeAudioReceiveStream* stream : audio_receive_streams_) {
+      if (stream->GetConfig().rtp.remote_ssrc != ssrc)
+        continue;
+      stream->DeliverRtp(packet, length, packet_time);
+      return DELIVERY_OK;
+    }
+  }
+  return DELIVERY_UNKNOWN_SSRC;
+}
+
+// Injects the stats that subsequent GetStats() calls will return.
+void FakeCall::SetStats(const webrtc::Call::Stats& stats) {
+  stats_ = stats;
+}
+
+int FakeCall::GetNumCreatedSendStreams() const {
+  return num_created_send_streams_;
+}
+
+int FakeCall::GetNumCreatedReceiveStreams() const {
+  return num_created_receive_streams_;
+}
+
+webrtc::Call::Stats FakeCall::GetStats() const {
+  return stats_;
+}
+
+// Only the stored config is updated; no bitrate logic runs in the fake.
+void FakeCall::SetBitrateConfig(
+    const webrtc::Call::Config::BitrateConfig& bitrate_config) {
+  config_.bitrate_config = bitrate_config;
+}
+
+void FakeCall::SetBitrateConfigMask(
+    const webrtc::Call::Config::BitrateConfigMask& mask) {
+  // TODO(zstein): not implemented
+}
+
+void FakeCall::SetBitrateAllocationStrategy(
+    std::unique_ptr<rtc::BitrateAllocationStrategy>
+        bitrate_allocation_strategy) {
+  // TODO(alexnarest): not implemented
+}
+
+// Records the per-media network state; DATA/ANY are test errors.
+void FakeCall::SignalChannelNetworkState(webrtc::MediaType media,
+                                         webrtc::NetworkState state) {
+  switch (media) {
+    case webrtc::MediaType::AUDIO:
+      audio_network_state_ = state;
+      break;
+    case webrtc::MediaType::VIDEO:
+      video_network_state_ = state;
+      break;
+    case webrtc::MediaType::DATA:
+    case webrtc::MediaType::ANY:
+      ADD_FAILURE()
+          << "SignalChannelNetworkState called with unknown parameter.";
+  }
+}
+
+// Records the per-packet transport overhead reported for |media|;
+// DATA/ANY are test errors.
+void FakeCall::OnTransportOverheadChanged(webrtc::MediaType media,
+                                          int transport_overhead_per_packet) {
+  switch (media) {
+    case webrtc::MediaType::AUDIO:
+      audio_transport_overhead_ = transport_overhead_per_packet;
+      break;
+    case webrtc::MediaType::VIDEO:
+      video_transport_overhead_ = transport_overhead_per_packet;
+      break;
+    case webrtc::MediaType::DATA:
+    case webrtc::MediaType::ANY:
+      // The message previously said "SignalChannelNetworkState" — a
+      // copy-paste error that made failures point at the wrong method.
+      ADD_FAILURE()
+          << "OnTransportOverheadChanged called with unknown parameter.";
+  }
+}
+
+// Remembers the last sent packet, and separately the last one with a real
+// (non-negative) id — ICE packets carry id -1 and are excluded from the
+// latter.
+void FakeCall::OnSentPacket(const rtc::SentPacket& sent_packet) {
+  last_sent_packet_ = sent_packet;
+  if (sent_packet.packet_id >= 0) {
+    last_sent_nonnegative_packet_id_ = sent_packet.packet_id;
+  }
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.h
new file mode 100644
index 0000000000..e598e9014a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtccall.h
@@ -0,0 +1,333 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains fake implementations, for use in unit tests, of the
+// following classes:
+//
+// webrtc::Call
+// webrtc::AudioSendStream
+// webrtc::AudioReceiveStream
+// webrtc::VideoSendStream
+// webrtc::VideoReceiveStream
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCCALL_H_
+#define MEDIA_ENGINE_FAKEWEBRTCCALL_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "call/audio_receive_stream.h"
+#include "call/audio_send_stream.h"
+#include "call/call.h"
+#include "call/flexfec_receive_stream.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/buffer.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+
+namespace cricket {
+// Fake webrtc::AudioSendStream that records configs, telephone events and
+// mute state for inspection by tests; stats are injected via SetStats().
+class FakeAudioSendStream final : public webrtc::AudioSendStream {
+ public:
+  // Snapshot of the arguments of the last SendTelephoneEvent() call.
+  struct TelephoneEvent {
+    int payload_type = -1;
+    int payload_frequency = -1;
+    int event_code = 0;
+    int duration_ms = 0;
+  };
+
+  explicit FakeAudioSendStream(
+      int id, const webrtc::AudioSendStream::Config& config);
+
+  int id() const { return id_; }
+  const webrtc::AudioSendStream::Config& GetConfig() const override;
+  void SetStats(const webrtc::AudioSendStream::Stats& stats);
+  TelephoneEvent GetLatestTelephoneEvent() const;
+  bool IsSending() const { return sending_; }
+  bool muted() const { return muted_; }
+
+ private:
+  // webrtc::AudioSendStream implementation.
+  void Reconfigure(const webrtc::AudioSendStream::Config& config) override;
+
+  void Start() override { sending_ = true; }
+  void Stop() override { sending_ = false; }
+
+  bool SendTelephoneEvent(int payload_type, int payload_frequency, int event,
+                          int duration_ms) override;
+  void SetMuted(bool muted) override;
+  webrtc::AudioSendStream::Stats GetStats() const override;
+  webrtc::AudioSendStream::Stats GetStats(
+      bool has_remote_tracks) const override;
+
+  int id_ = -1;
+  TelephoneEvent latest_telephone_event_;
+  webrtc::AudioSendStream::Config config_;
+  webrtc::AudioSendStream::Stats stats_;
+  bool sending_ = false;
+  bool muted_ = false;
+};
+
+// Fake webrtc::AudioReceiveStream that counts delivered RTP packets and
+// keeps a copy of the last one; stats are injected via SetStats().
+class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
+ public:
+  explicit FakeAudioReceiveStream(
+      int id, const webrtc::AudioReceiveStream::Config& config);
+
+  int id() const { return id_; }
+  const webrtc::AudioReceiveStream::Config& GetConfig() const;
+  void SetStats(const webrtc::AudioReceiveStream::Stats& stats);
+  int received_packets() const { return received_packets_; }
+  bool VerifyLastPacket(const uint8_t* data, size_t length) const;
+  const webrtc::AudioSinkInterface* sink() const { return sink_.get(); }
+  float gain() const { return gain_; }
+  bool DeliverRtp(const uint8_t* packet,
+                  size_t length,
+                  const webrtc::PacketTime& packet_time);
+  bool started() const { return started_; }
+
+ private:
+  // webrtc::AudioReceiveStream implementation.
+  void Start() override { started_ = true; }
+  void Stop() override { started_ = false; }
+
+  webrtc::AudioReceiveStream::Stats GetStats() const override;
+  int GetOutputLevel() const override { return 0; }
+  void SetSink(std::unique_ptr<webrtc::AudioSinkInterface> sink) override;
+  void SetGain(float gain) override;
+  std::vector<webrtc::RtpSource> GetSources() const override {
+    return std::vector<webrtc::RtpSource>();
+  }
+
+  int id_ = -1;
+  webrtc::AudioReceiveStream::Config config_;
+  webrtc::AudioReceiveStream::Stats stats_;
+  int received_packets_ = 0;
+  std::unique_ptr<webrtc::AudioSinkInterface> sink_;
+  float gain_ = 1.0f;
+  rtc::Buffer last_packet_;
+  bool started_ = false;
+};
+
+// Fake webrtc::VideoSendStream that also acts as the video sink, so tests
+// can push frames directly and inspect the resulting encoder streams,
+// codec settings and scaling flags.
+class FakeVideoSendStream final
+    : public webrtc::VideoSendStream,
+      public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+  FakeVideoSendStream(webrtc::VideoSendStream::Config config,
+                      webrtc::VideoEncoderConfig encoder_config);
+  ~FakeVideoSendStream() override;
+  const webrtc::VideoSendStream::Config& GetConfig() const;
+  const webrtc::VideoEncoderConfig& GetEncoderConfig() const;
+  const std::vector<webrtc::VideoStream>& GetVideoStreams() const;
+
+  bool IsSending() const;
+  bool GetVp8Settings(webrtc::VideoCodecVP8* settings) const;
+  bool GetVp9Settings(webrtc::VideoCodecVP9* settings) const;
+
+  int GetNumberOfSwappedFrames() const;
+  int GetLastWidth() const;
+  int GetLastHeight() const;
+  int64_t GetLastTimestamp() const;
+  void SetStats(const webrtc::VideoSendStream::Stats& stats);
+  int num_encoder_reconfigurations() const {
+    return num_encoder_reconfigurations_;
+  }
+
+  void EnableEncodedFrameRecording(const std::vector<rtc::PlatformFile>& files,
+                                   size_t byte_limit) override;
+
+  bool resolution_scaling_enabled() const {
+    return resolution_scaling_enabled_;
+  }
+  bool framerate_scaling_enabled() const { return framerate_scaling_enabled_; }
+  void InjectVideoSinkWants(const rtc::VideoSinkWants& wants);
+
+  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() const {
+    return source_;
+  }
+
+ private:
+  // rtc::VideoSinkInterface<VideoFrame> implementation.
+  void OnFrame(const webrtc::VideoFrame& frame) override;
+
+  // webrtc::VideoSendStream implementation.
+  void Start() override;
+  void Stop() override;
+  void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+                 const webrtc::VideoSendStream::DegradationPreference&
+                     degradation_preference) override;
+  webrtc::VideoSendStream::Stats GetStats() override;
+  void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override;
+
+  bool sending_;
+  webrtc::VideoSendStream::Config config_;
+  webrtc::VideoEncoderConfig encoder_config_;
+  std::vector<webrtc::VideoStream> video_streams_;
+  rtc::VideoSinkWants sink_wants_;
+
+  bool codec_settings_set_;
+  // Only one codec's settings are valid at a time, selected by payload name.
+  union VpxSettings {
+    webrtc::VideoCodecVP8 vp8;
+    webrtc::VideoCodecVP9 vp9;
+  } vpx_settings_;
+  bool resolution_scaling_enabled_;
+  bool framerate_scaling_enabled_;
+  rtc::VideoSourceInterface<webrtc::VideoFrame>* source_;
+  int num_swapped_frames_;
+  rtc::Optional<webrtc::VideoFrame> last_frame_;
+  webrtc::VideoSendStream::Stats stats_;
+  int num_encoder_reconfigurations_ = 0;
+};
+
+// Fake webrtc::VideoReceiveStream; frames can be injected straight into the
+// configured renderer and stats are injected via SetStats().
+class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
+ public:
+  explicit FakeVideoReceiveStream(webrtc::VideoReceiveStream::Config config);
+
+  const webrtc::VideoReceiveStream::Config& GetConfig() const;
+
+  bool IsReceiving() const;
+
+  void InjectFrame(const webrtc::VideoFrame& frame);
+
+  void SetStats(const webrtc::VideoReceiveStream::Stats& stats);
+
+  void EnableEncodedFrameRecording(rtc::PlatformFile file,
+                                   size_t byte_limit) override;
+
+  void AddSecondarySink(webrtc::RtpPacketSinkInterface* sink) override;
+  void RemoveSecondarySink(const webrtc::RtpPacketSinkInterface* sink) override;
+
+ private:
+  // webrtc::VideoReceiveStream implementation.
+  void Start() override;
+  void Stop() override;
+
+  webrtc::VideoReceiveStream::Stats GetStats() const override;
+
+  webrtc::VideoReceiveStream::Config config_;
+  bool receiving_;
+  webrtc::VideoReceiveStream::Stats stats_;
+};
+
+// Fake webrtc::FlexfecReceiveStream; stores the config and returns empty
+// stats. Packet delivery is intentionally not implemented.
+class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream {
+ public:
+  explicit FakeFlexfecReceiveStream(
+      const webrtc::FlexfecReceiveStream::Config& config);
+
+  const webrtc::FlexfecReceiveStream::Config& GetConfig() const override;
+
+ private:
+  webrtc::FlexfecReceiveStream::Stats GetStats() const override;
+
+  void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override;
+
+  webrtc::FlexfecReceiveStream::Config config_;
+};
+
+// Fake webrtc::Call that owns the fake streams it creates, exposes them to
+// tests, and routes packets delivered to it (it is its own PacketReceiver).
+// Its destructor EXPECTs that all streams were destroyed.
+class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
+ public:
+  explicit FakeCall(const webrtc::Call::Config& config);
+  ~FakeCall() override;
+
+  webrtc::Call::Config GetConfig() const;
+  const std::vector<FakeVideoSendStream*>& GetVideoSendStreams();
+  const std::vector<FakeVideoReceiveStream*>& GetVideoReceiveStreams();
+
+  const std::vector<FakeAudioSendStream*>& GetAudioSendStreams();
+  const FakeAudioSendStream* GetAudioSendStream(uint32_t ssrc);
+  const std::vector<FakeAudioReceiveStream*>& GetAudioReceiveStreams();
+  const FakeAudioReceiveStream* GetAudioReceiveStream(uint32_t ssrc);
+
+  const std::vector<FakeFlexfecReceiveStream*>& GetFlexfecReceiveStreams();
+
+  rtc::SentPacket last_sent_packet() const { return last_sent_packet_; }
+
+  // This is useful if we care about the last media packet (with id populated)
+  // but not the last ICE packet (with -1 ID).
+  int last_sent_nonnegative_packet_id() const {
+    return last_sent_nonnegative_packet_id_;
+  }
+
+  webrtc::NetworkState GetNetworkState(webrtc::MediaType media) const;
+  int GetNumCreatedSendStreams() const;
+  int GetNumCreatedReceiveStreams() const;
+  void SetStats(const webrtc::Call::Stats& stats);
+
+ private:
+  webrtc::AudioSendStream* CreateAudioSendStream(
+      const webrtc::AudioSendStream::Config& config) override;
+  void DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) override;
+
+  webrtc::AudioReceiveStream* CreateAudioReceiveStream(
+      const webrtc::AudioReceiveStream::Config& config) override;
+  void DestroyAudioReceiveStream(
+      webrtc::AudioReceiveStream* receive_stream) override;
+
+  webrtc::VideoSendStream* CreateVideoSendStream(
+      webrtc::VideoSendStream::Config config,
+      webrtc::VideoEncoderConfig encoder_config) override;
+  void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override;
+
+  webrtc::VideoReceiveStream* CreateVideoReceiveStream(
+      webrtc::VideoReceiveStream::Config config) override;
+  void DestroyVideoReceiveStream(
+      webrtc::VideoReceiveStream* receive_stream) override;
+
+  webrtc::FlexfecReceiveStream* CreateFlexfecReceiveStream(
+      const webrtc::FlexfecReceiveStream::Config& config) override;
+  void DestroyFlexfecReceiveStream(
+      webrtc::FlexfecReceiveStream* receive_stream) override;
+
+  webrtc::PacketReceiver* Receiver() override;
+
+  DeliveryStatus DeliverPacket(webrtc::MediaType media_type,
+                               const uint8_t* packet,
+                               size_t length,
+                               const webrtc::PacketTime& packet_time) override;
+
+  webrtc::Call::Stats GetStats() const override;
+
+  void SetBitrateConfig(
+      const webrtc::Call::Config::BitrateConfig& bitrate_config) override;
+  void SetBitrateConfigMask(
+      const webrtc::Call::Config::BitrateConfigMask& mask) override;
+  void SetBitrateAllocationStrategy(
+      std::unique_ptr<rtc::BitrateAllocationStrategy>
+          bitrate_allocation_strategy) override;
+  void OnNetworkRouteChanged(const std::string& transport_name,
+                             const rtc::NetworkRoute& network_route) override {}
+  void SignalChannelNetworkState(webrtc::MediaType media,
+                                 webrtc::NetworkState state) override;
+  void OnTransportOverheadChanged(webrtc::MediaType media,
+                                  int transport_overhead_per_packet) override;
+  void OnSentPacket(const rtc::SentPacket& sent_packet) override;
+
+  webrtc::Call::Config config_;
+  webrtc::NetworkState audio_network_state_;
+  webrtc::NetworkState video_network_state_;
+  rtc::SentPacket last_sent_packet_;
+  int last_sent_nonnegative_packet_id_ = -1;
+  // Arbitrary starting id handed out to created audio streams.
+  int next_stream_id_ = 665;
+  webrtc::Call::Stats stats_;
+  // The call owns these streams; they are deleted in the Destroy*() methods.
+  std::vector<FakeVideoSendStream*> video_send_streams_;
+  std::vector<FakeAudioSendStream*> audio_send_streams_;
+  std::vector<FakeVideoReceiveStream*> video_receive_streams_;
+  std::vector<FakeAudioReceiveStream*> audio_receive_streams_;
+  std::vector<FakeFlexfecReceiveStream*> flexfec_receive_streams_;
+
+  int num_created_send_streams_;
+  int num_created_receive_streams_;
+
+  int audio_transport_overhead_;
+  int video_transport_overhead_;
+};
+
+} // namespace cricket
+#endif // MEDIA_ENGINE_FAKEWEBRTCCALL_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtcdeviceinfo.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcdeviceinfo.h
new file mode 100644
index 0000000000..51a5b8e50c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcdeviceinfo.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCDEVICEINFO_H_
+#define MEDIA_ENGINE_FAKEWEBRTCDEVICEINFO_H_
+
+#include <string>
+#include <vector>
+
+#include "media/engine/webrtcvideocapturer.h"
+#include "rtc_base/stringutils.h"
+
+// Fake class for mocking out webrtc::VideoCaptureModule::DeviceInfo.
+class FakeWebRtcDeviceInfo : public webrtc::VideoCaptureModule::DeviceInfo {
+ public:
+  struct Device {
+    Device(const std::string& n, const std::string& i) : name(n), id(i) {}
+    std::string name;
+    std::string id;
+    std::string product;
+    std::vector<webrtc::VideoCaptureCapability> caps;
+  };
+  FakeWebRtcDeviceInfo() {}
+  void AddDevice(const std::string& device_name, const std::string& device_id) {
+    devices_.push_back(Device(device_name, device_id));
+  }
+  void AddCapability(const std::string& device_id,
+                     const webrtc::VideoCaptureCapability& cap) {
+    // c_str() already yields const char*; no reinterpret_cast needed.
+    Device* dev = GetDeviceById(device_id.c_str());
+    if (!dev) return;
+    dev->caps.push_back(cap);
+  }
+  virtual uint32_t NumberOfDevices() {
+    return static_cast<uint32_t>(devices_.size());  // cast matches return type
+  }
+  virtual int32_t GetDeviceName(uint32_t device_num,
+                                char* device_name,
+                                uint32_t device_name_len,
+                                char* device_id,
+                                uint32_t device_id_len,
+                                char* product_id,
+                                uint32_t product_id_len) {
+    Device* dev = GetDeviceByIndex(device_num);
+    if (!dev) return -1;
+    // Output parameters are already char*; the reinterpret_casts previously
+    // applied here were redundant no-ops.
+    rtc::strcpyn(device_name, device_name_len, dev->name.c_str());
+    rtc::strcpyn(device_id, device_id_len,
+                 dev->id.c_str());
+    if (product_id) {
+      rtc::strcpyn(product_id, product_id_len,
+                   dev->product.c_str());
+    }
+    return 0;
+  }
+  virtual int32_t NumberOfCapabilities(const char* device_id) {
+    Device* dev = GetDeviceById(device_id);
+    if (!dev) return -1;
+    return static_cast<int32_t>(dev->caps.size());
+  }
+  virtual int32_t GetCapability(const char* device_id,
+                                const uint32_t device_cap_num,
+                                webrtc::VideoCaptureCapability& cap) {
+    Device* dev = GetDeviceById(device_id);
+    if (!dev) return -1;
+    if (device_cap_num >= dev->caps.size()) return -1;
+    cap = dev->caps[device_cap_num];
+    return 0;
+  }
+  virtual int32_t GetOrientation(const char* device_id,
+                                 webrtc::VideoRotation& rotation) {
+    return -1;  // not implemented
+  }
+  virtual int32_t GetBestMatchedCapability(
+      const char* device_id,
+      const webrtc::VideoCaptureCapability& requested,
+      webrtc::VideoCaptureCapability& resulting) {
+    return -1;  // not implemented
+  }
+  virtual int32_t DisplayCaptureSettingsDialogBox(
+      const char* device_id, const char* dialog_title,
+      void* parent, uint32_t x, uint32_t y) {
+    return -1;  // not implemented
+  }
+
+  Device* GetDeviceByIndex(size_t num) {
+    return (num < devices_.size()) ? &devices_[num] : NULL;
+  }
+  Device* GetDeviceById(const char* device_id) {
+    for (size_t i = 0; i < devices_.size(); ++i) {
+      if (devices_[i].id == device_id) {  // std::string == const char*
+        return &devices_[i];
+      }
+    }
+    return NULL;
+  }
+
+ private:
+  std::vector<Device> devices_;
+};
+
+#endif // MEDIA_ENGINE_FAKEWEBRTCDEVICEINFO_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvcmfactory.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvcmfactory.h
new file mode 100644
index 0000000000..70931e129a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvcmfactory.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCVCMFACTORY_H_
+#define MEDIA_ENGINE_FAKEWEBRTCVCMFACTORY_H_
+
+#include <vector>
+
+#include "media/engine/fakewebrtcdeviceinfo.h"
+#include "media/engine/fakewebrtcvideocapturemodule.h"
+#include "media/engine/webrtcvideocapturer.h"
+
+// Factory class to allow the fakes above to be injected into
+// WebRtcVideoCapturer.
+class FakeWebRtcVcmFactory : public cricket::WebRtcVcmFactoryInterface {
+ public:
+  virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
+      const char* device_id) {
+    if (!device_info.GetDeviceById(device_id)) return NULL;
+    rtc::scoped_refptr<FakeWebRtcVideoCaptureModule> module(
+        new rtc::RefCountedObject<FakeWebRtcVideoCaptureModule>(this));
+    modules.push_back(module);
+    return module;
+  }
+  virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
+    return &device_info;
+  }
+  virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
+  }
+  void OnDestroyed(webrtc::VideoCaptureModule* module) {
+    // Erase-remove: std::remove alone only shifts elements and never shrinks.
+    modules.erase(std::remove(modules.begin(), modules.end(), module), modules.end());
+  }
+  FakeWebRtcDeviceInfo device_info;
+  std::vector<rtc::scoped_refptr<FakeWebRtcVideoCaptureModule>> modules;
+};
+
+FakeWebRtcVideoCaptureModule::~FakeWebRtcVideoCaptureModule() {
+ if (factory_)
+ factory_->OnDestroyed(this);
+}
+
+#endif // MEDIA_ENGINE_FAKEWEBRTCVCMFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideocapturemodule.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideocapturemodule.h
new file mode 100644
index 0000000000..bf23a11277
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideocapturemodule.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCVIDEOCAPTUREMODULE_H_
+#define MEDIA_ENGINE_FAKEWEBRTCVIDEOCAPTUREMODULE_H_
+
+#include <vector>
+
+#include "api/video/i420_buffer.h"
+#include "media/base/testutils.h"
+#include "media/engine/webrtcvideocapturer.h"
+
+class FakeWebRtcVcmFactory;
+
+// Fake class for mocking out webrtc::VideoCaptureModule.
+class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
+ public:
+ explicit FakeWebRtcVideoCaptureModule(FakeWebRtcVcmFactory* factory)
+ : factory_(factory), callback_(NULL), running_(false) {}
+ ~FakeWebRtcVideoCaptureModule();
+ void RegisterCaptureDataCallback(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* callback) override {
+ callback_ = callback;
+ }
+ void DeRegisterCaptureDataCallback() override { callback_ = NULL; }
+ int32_t StartCapture(const webrtc::VideoCaptureCapability& cap) override {
+ if (running_) return -1;
+ cap_ = cap;
+ running_ = true;
+ return 0;
+ }
+ int32_t StopCapture() override {
+ running_ = false;
+ return 0;
+ }
+ const char* CurrentDeviceName() const override {
+ return NULL; // not implemented
+ }
+ bool CaptureStarted() override { return running_; }
+ int32_t CaptureSettings(webrtc::VideoCaptureCapability& settings) override {
+ if (!running_) return -1;
+ settings = cap_;
+ return 0;
+ }
+
+ int32_t SetCaptureRotation(webrtc::VideoRotation rotation) override {
+ return -1; // not implemented
+ }
+ bool SetApplyRotation(bool enable) override {
+ return true; // ignored
+ }
+ bool GetApplyRotation() override {
+ return true; // Rotation compensation is turned on.
+ }
+ void SendFrame(int w, int h) {
+ if (!running_) return;
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ webrtc::I420Buffer::Create(w, h);
+ // Initialize memory to satisfy DrMemory tests. See
+ // https://bugs.chromium.org/p/libyuv/issues/detail?id=377
+ buffer->InitializeData();
+ if (callback_) {
+ callback_->OnFrame(
+ webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
+ }
+ }
+
+ const webrtc::VideoCaptureCapability& cap() const {
+ return cap_;
+ }
+
+ private:
+ FakeWebRtcVcmFactory* factory_;
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* callback_;
+ bool running_;
+ webrtc::VideoCaptureCapability cap_;
+};
+
+#endif // MEDIA_ENGINE_FAKEWEBRTCVIDEOCAPTUREMODULE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideoengine.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideoengine.h
new file mode 100644
index 0000000000..2153f6be38
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvideoengine.h
@@ -0,0 +1,260 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCVIDEOENGINE_H_
+#define MEDIA_ENGINE_FAKEWEBRTCVIDEOENGINE_H_
+
+#include <map>
+#include <set>
+#include <vector>
+#include <string>
+
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/codec.h"
+#include "media/engine/webrtcvideodecoderfactory.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/basictypes.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace cricket {
+static const int kEventTimeoutMs = 10000;
+
+// Fake class for mocking out webrtc::VideoDecoder
+class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder {
+ public:
+ FakeWebRtcVideoDecoder() : num_frames_received_(0) {}
+
+ virtual int32_t InitDecode(const webrtc::VideoCodec*, int32_t) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ virtual int32_t Decode(const webrtc::EncodedImage&,
+ bool,
+ const webrtc::RTPFragmentationHeader*,
+ const webrtc::CodecSpecificInfo*,
+ int64_t) {
+ num_frames_received_++;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ virtual int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback*) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ virtual int32_t Release() { return WEBRTC_VIDEO_CODEC_OK; }
+
+ int GetNumFramesReceived() const {
+ return num_frames_received_;
+ }
+
+ private:
+ int num_frames_received_;
+};
+
+// Fake class for mocking out WebRtcVideoDecoderFactory.
+class FakeWebRtcVideoDecoderFactory : public WebRtcVideoDecoderFactory {
+ public:
+ FakeWebRtcVideoDecoderFactory()
+ : num_created_decoders_(0) {
+ }
+
+ virtual webrtc::VideoDecoder* CreateVideoDecoder(
+ webrtc::VideoCodecType type) {
+ if (supported_codec_types_.count(type) == 0) {
+ return NULL;
+ }
+ FakeWebRtcVideoDecoder* decoder = new FakeWebRtcVideoDecoder();
+ decoders_.push_back(decoder);
+ num_created_decoders_++;
+ return decoder;
+ }
+
+ virtual webrtc::VideoDecoder* CreateVideoDecoderWithParams(
+ webrtc::VideoCodecType type,
+ VideoDecoderParams params) {
+ params_.push_back(params);
+ return CreateVideoDecoder(type);
+ }
+
+ virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) {
+ decoders_.erase(
+ std::remove(decoders_.begin(), decoders_.end(), decoder),
+ decoders_.end());
+ delete decoder;
+ }
+
+ void AddSupportedVideoCodecType(webrtc::VideoCodecType type) {
+ supported_codec_types_.insert(type);
+ }
+
+ int GetNumCreatedDecoders() {
+ return num_created_decoders_;
+ }
+
+ const std::vector<FakeWebRtcVideoDecoder*>& decoders() {
+ return decoders_;
+ }
+
+ const std::vector<VideoDecoderParams>& params() { return params_; }
+
+ private:
+ std::set<webrtc::VideoCodecType> supported_codec_types_;
+ std::vector<FakeWebRtcVideoDecoder*> decoders_;
+ int num_created_decoders_;
+ std::vector<VideoDecoderParams> params_;
+};
+
+// Fake class for mocking out webrtc::VideoEncoder
+class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
+ public:
+ FakeWebRtcVideoEncoder()
+ : init_encode_event_(false, false), num_frames_encoded_(0) {}
+
+ int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize) override {
+ rtc::CritScope lock(&crit_);
+ codec_settings_ = *codecSettings;
+ init_encode_event_.Set();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ bool WaitForInitEncode() { return init_encode_event_.Wait(kEventTimeoutMs); }
+
+ webrtc::VideoCodec GetCodecSettings() {
+ rtc::CritScope lock(&crit_);
+ return codec_settings_;
+ }
+
+ int32_t Encode(const webrtc::VideoFrame& inputImage,
+ const webrtc::CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<webrtc::FrameType>* frame_types) override {
+ rtc::CritScope lock(&crit_);
+ ++num_frames_encoded_;
+ init_encode_event_.Set();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) override {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; }
+
+ int32_t SetChannelParameters(uint32_t packetLoss, int64_t rtt) override {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t SetRateAllocation(const webrtc::BitrateAllocation& allocation,
+ uint32_t framerate) override {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int GetNumEncodedFrames() {
+ rtc::CritScope lock(&crit_);
+ return num_frames_encoded_;
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ rtc::Event init_encode_event_;
+ int num_frames_encoded_ RTC_GUARDED_BY(crit_);
+ webrtc::VideoCodec codec_settings_ RTC_GUARDED_BY(crit_);
+};
+
+// Fake class for mocking out WebRtcVideoEncoderFactory.
+class FakeWebRtcVideoEncoderFactory : public WebRtcVideoEncoderFactory {
+ public:
+ FakeWebRtcVideoEncoderFactory()
+ : created_video_encoder_event_(false, false),
+ num_created_encoders_(0),
+ encoders_have_internal_sources_(false) {}
+
+ webrtc::VideoEncoder* CreateVideoEncoder(
+ const cricket::VideoCodec& codec) override {
+ rtc::CritScope lock(&crit_);
+ if (!FindMatchingCodec(codecs_, codec))
+ return nullptr;
+ FakeWebRtcVideoEncoder* encoder = new FakeWebRtcVideoEncoder();
+ encoders_.push_back(encoder);
+ num_created_encoders_++;
+ created_video_encoder_event_.Set();
+ return encoder;
+ }
+
+ bool WaitForCreatedVideoEncoders(int num_encoders) {
+ int64_t start_offset_ms = rtc::TimeMillis();
+ int64_t wait_time = kEventTimeoutMs;
+ do {
+ if (GetNumCreatedEncoders() >= num_encoders)
+ return true;
+ wait_time = kEventTimeoutMs - (rtc::TimeMillis() - start_offset_ms);
+ } while (wait_time > 0 && created_video_encoder_event_.Wait(wait_time));
+ return false;
+ }
+
+ void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override {
+ rtc::CritScope lock(&crit_);
+ encoders_.erase(
+ std::remove(encoders_.begin(), encoders_.end(), encoder),
+ encoders_.end());
+ delete encoder;
+ }
+
+ const std::vector<cricket::VideoCodec>& supported_codecs() const override {
+ return codecs_;
+ }
+
+ bool EncoderTypeHasInternalSource(
+ webrtc::VideoCodecType type) const override {
+ return encoders_have_internal_sources_;
+ }
+
+ void set_encoders_have_internal_sources(bool internal_source) {
+ encoders_have_internal_sources_ = internal_source;
+ }
+
+ void AddSupportedVideoCodec(const cricket::VideoCodec& codec) {
+ codecs_.push_back(codec);
+ }
+
+ void AddSupportedVideoCodecType(const std::string& name) {
+ codecs_.push_back(cricket::VideoCodec(name));
+ }
+
+ int GetNumCreatedEncoders() {
+ rtc::CritScope lock(&crit_);
+ return num_created_encoders_;
+ }
+
+ const std::vector<FakeWebRtcVideoEncoder*> encoders() {
+ rtc::CritScope lock(&crit_);
+ return encoders_;
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ rtc::Event created_video_encoder_event_;
+ std::vector<cricket::VideoCodec> codecs_;
+ std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(crit_);
+ int num_created_encoders_ RTC_GUARDED_BY(crit_);
+ bool encoders_have_internal_sources_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_FAKEWEBRTCVIDEOENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvoiceengine.h b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvoiceengine.h
new file mode 100644
index 0000000000..444afdd69f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/fakewebrtcvoiceengine.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_FAKEWEBRTCVOICEENGINE_H_
+#define MEDIA_ENGINE_FAKEWEBRTCVOICEENGINE_H_
+
+#include <map>
+#include <vector>
+
+#include "media/engine/webrtcvoe.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace voe {
+class TransmitMixer;
+} // namespace voe
+} // namespace webrtc
+
+namespace cricket {
+
+#define WEBRTC_CHECK_CHANNEL(channel) \
+ if (channels_.find(channel) == channels_.end()) return -1;
+
+#define WEBRTC_STUB(method, args) \
+ int method args override { return 0; }
+
+#define WEBRTC_FUNC(method, args) int method args override
+
+class FakeWebRtcVoiceEngine : public webrtc::VoEBase {
+ public:
+ struct Channel {
+ std::vector<webrtc::CodecInst> recv_codecs;
+ size_t neteq_capacity = 0;
+ bool neteq_fast_accelerate = false;
+ };
+
+ explicit FakeWebRtcVoiceEngine(webrtc::voe::TransmitMixer* transmit_mixer)
+ : transmit_mixer_(transmit_mixer) {}
+ ~FakeWebRtcVoiceEngine() override {
+ RTC_CHECK(channels_.empty());
+ }
+
+ bool IsInited() const { return inited_; }
+ int GetLastChannel() const { return last_channel_; }
+ int GetNumChannels() const { return static_cast<int>(channels_.size()); }
+ void set_fail_create_channel(bool fail_create_channel) {
+ fail_create_channel_ = fail_create_channel;
+ }
+
+ WEBRTC_STUB(Release, ());
+
+ // webrtc::VoEBase
+ WEBRTC_FUNC(Init,
+ (webrtc::AudioDeviceModule* adm,
+ webrtc::AudioProcessing* audioproc,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ decoder_factory)) {
+ inited_ = true;
+ return 0;
+ }
+ void Terminate() override {
+ inited_ = false;
+ }
+ webrtc::voe::TransmitMixer* transmit_mixer() override {
+ return transmit_mixer_;
+ }
+ WEBRTC_FUNC(CreateChannel, ()) {
+ return CreateChannel(webrtc::VoEBase::ChannelConfig());
+ }
+ WEBRTC_FUNC(CreateChannel, (const webrtc::VoEBase::ChannelConfig& config)) {
+ if (fail_create_channel_) {
+ return -1;
+ }
+ Channel* ch = new Channel();
+ ch->neteq_capacity = config.acm_config.neteq_config.max_packets_in_buffer;
+ ch->neteq_fast_accelerate =
+ config.acm_config.neteq_config.enable_fast_accelerate;
+ channels_[++last_channel_] = ch;
+ return last_channel_;
+ }
+ WEBRTC_FUNC(DeleteChannel, (int channel)) {
+ WEBRTC_CHECK_CHANNEL(channel);
+ delete channels_[channel];
+ channels_.erase(channel);
+ return 0;
+ }
+ WEBRTC_STUB(StartPlayout, (int channel));
+ WEBRTC_STUB(StartSend, (int channel));
+ WEBRTC_STUB(StopPlayout, (int channel));
+ WEBRTC_STUB(StopSend, (int channel));
+ WEBRTC_STUB(SetPlayout, (bool enable));
+ WEBRTC_STUB(SetRecording, (bool enable));
+
+ size_t GetNetEqCapacity() const {
+ auto ch = channels_.find(last_channel_);
+ RTC_DCHECK(ch != channels_.end());
+ return ch->second->neteq_capacity;
+ }
+ bool GetNetEqFastAccelerate() const {
+ auto ch = channels_.find(last_channel_);
+ RTC_CHECK(ch != channels_.end());
+ return ch->second->neteq_fast_accelerate;
+ }
+
+ private:
+ bool inited_ = false;
+ int last_channel_ = -1;
+ std::map<int, Channel*> channels_;
+ bool fail_create_channel_ = false;
+ webrtc::voe::TransmitMixer* transmit_mixer_ = nullptr;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FakeWebRtcVoiceEngine);
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_FAKEWEBRTCVOICEENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.cc b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.cc
new file mode 100644
index 0000000000..e8cecb79a4
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.cc
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/internaldecoderfactory.h"
+
+#include "media/base/mediaconstants.h"
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+std::vector<SdpVideoFormat> InternalDecoderFactory::GetSupportedFormats()
+ const {
+ std::vector<SdpVideoFormat> formats;
+ formats.push_back(SdpVideoFormat(cricket::kVp8CodecName));
+ if (VP9Decoder::IsSupported())
+ formats.push_back(SdpVideoFormat(cricket::kVp9CodecName));
+ for (const SdpVideoFormat& h264_format : SupportedH264Codecs())
+ formats.push_back(h264_format);
+ return formats;
+}
+
+std::unique_ptr<VideoDecoder> InternalDecoderFactory::CreateVideoDecoder(
+ const SdpVideoFormat& format) {
+ if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
+ return VP8Decoder::Create();
+
+ if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName)) {
+ RTC_DCHECK(VP9Decoder::IsSupported());
+ return VP9Decoder::Create();
+ }
+
+ if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
+ return H264Decoder::Create();
+
+ RTC_LOG(LS_ERROR) << "Trying to create decoder for unsupported format";
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.h b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.h
new file mode 100644
index 0000000000..7420129600
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_INTERNALDECODERFACTORY_H_
+#define MEDIA_ENGINE_INTERNALDECODERFACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+
+class InternalDecoderFactory : public VideoDecoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_INTERNALDECODERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory_unittest.cc
new file mode 100644
index 0000000000..147293dcf7
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory_unittest.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/internaldecoderfactory.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "media/base/mediaconstants.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(InternalDecoderFactory, TestVP8) {
+ InternalDecoderFactory factory;
+ std::unique_ptr<VideoDecoder> decoder =
+ factory.CreateVideoDecoder(SdpVideoFormat(cricket::kVp8CodecName));
+ EXPECT_TRUE(decoder);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.cc b/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.cc
new file mode 100644
index 0000000000..5f8479cfb8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/internalencoderfactory.h"
+
+#include <utility>
+
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+std::vector<SdpVideoFormat> InternalEncoderFactory::GetSupportedFormats()
+ const {
+ std::vector<SdpVideoFormat> supported_codecs;
+ supported_codecs.push_back(SdpVideoFormat(cricket::kVp8CodecName));
+ if (webrtc::VP9Encoder::IsSupported())
+ supported_codecs.push_back(SdpVideoFormat(cricket::kVp9CodecName));
+
+ for (const webrtc::SdpVideoFormat& format : webrtc::SupportedH264Codecs())
+ supported_codecs.push_back(format);
+
+ return supported_codecs;
+}
+
+VideoEncoderFactory::CodecInfo InternalEncoderFactory::QueryVideoEncoder(
+ const SdpVideoFormat& format) const {
+ CodecInfo info;
+ info.is_hardware_accelerated = false;
+ info.has_internal_source = false;
+ return info;
+}
+
+std::unique_ptr<VideoEncoder> InternalEncoderFactory::CreateVideoEncoder(
+    const SdpVideoFormat& format) {
+  if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
+    return VP8Encoder::Create();
+
+  if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
+    return VP9Encoder::Create();
+
+  if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
+    return H264Encoder::Create(cricket::VideoCodec(format));
+
+  RTC_LOG(LS_ERROR) << "Trying to create encoder of unsupported format "
+                    << format.name;
+  return nullptr;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.h b/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.h
new file mode 100644
index 0000000000..28ed3fc9a5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_INTERNALENCODERFACTORY_H_
+#define MEDIA_ENGINE_INTERNALENCODERFACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+
+class InternalEncoderFactory : public VideoEncoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+
+ CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override;
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_INTERNALENCODERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine.h b/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine.h
new file mode 100644
index 0000000000..0ff997d7ad
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_NULLWEBRTCVIDEOENGINE_H_
+#define MEDIA_ENGINE_NULLWEBRTCVIDEOENGINE_H_
+
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/mediaengine.h"
+
+namespace webrtc {
+
+class Call;
+
+} // namespace webrtc
+
+
+namespace cricket {
+
+class VideoMediaChannel;
+class WebRtcVideoDecoderFactory;
+class WebRtcVideoEncoderFactory;
+
+// Video engine implementation that does nothing and can be used in
+// CompositeMediaEngine.
+class NullWebRtcVideoEngine {
+ public:
+ std::vector<VideoCodec> codecs() const { return std::vector<VideoCodec>(); }
+
+ RtpCapabilities GetCapabilities() const { return RtpCapabilities(); }
+
+ VideoMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) {
+ return nullptr;
+ }
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_NULLWEBRTCVIDEOENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine_unittest.cc
new file mode 100644
index 0000000000..a53f773fec
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/nullwebrtcvideoengine_unittest.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/nullwebrtcvideoengine.h"
+#include "media/engine/webrtcvoiceengine.h"
+#include "modules/audio_device/include/mock_audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "test/gtest.h"
+#include "test/mock_audio_decoder_factory.h"
+#include "test/mock_audio_encoder_factory.h"
+
+namespace cricket {
+
+class WebRtcMediaEngineNullVideo
+ : public CompositeMediaEngine<WebRtcVoiceEngine, NullWebRtcVideoEngine> {
+ public:
+ WebRtcMediaEngineNullVideo(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory)
+ : CompositeMediaEngine<WebRtcVoiceEngine, NullWebRtcVideoEngine>(
+ std::forward_as_tuple(adm,
+ audio_encoder_factory,
+ audio_decoder_factory,
+ nullptr,
+ webrtc::AudioProcessing::Create()),
+ std::forward_as_tuple()) {}
+};
+
+// Simple test to check if NullWebRtcVideoEngine implements the methods
+// required by CompositeMediaEngine.
+TEST(NullWebRtcVideoEngineTest, CheckInterface) {
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ WebRtcMediaEngineNullVideo engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory());
+ EXPECT_TRUE(engine.Init());
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.cc b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.cc
new file mode 100644
index 0000000000..1927f43a00
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.cc
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/payload_type_mapper.h"
+
+#include <utility>
+
+#include "api/audio_codecs/audio_format.h"
+#include "common_types.h" // NOLINT(build/include)
+#include "media/base/mediaconstants.h"
+
+namespace cricket {
+
+webrtc::SdpAudioFormat AudioCodecToSdpAudioFormat(const AudioCodec& ac) {
+ return webrtc::SdpAudioFormat(ac.name, ac.clockrate, ac.channels, ac.params);
+}
+
+PayloadTypeMapper::PayloadTypeMapper()
+ // RFC 3551 reserves payload type numbers in the range 96-127 exclusively
+ // for dynamic assignment. Once those are used up, it is recommended that
+ // payload types unassigned by the RFC are used for dynamic payload type
+ // mapping, before any static payload ids. At this point, we only support
+ // mapping within the exclusive range.
+ : next_unused_payload_type_(96),
+ max_payload_type_(127),
+ mappings_({
+ // Static payload type assignments according to RFC 3551.
+ {{"PCMU", 8000, 1}, 0},
+ {{"GSM", 8000, 1}, 3},
+ {{"G723", 8000, 1}, 4},
+ {{"DVI4", 8000, 1}, 5},
+ {{"DVI4", 16000, 1}, 6},
+ {{"LPC", 8000, 1}, 7},
+ {{"PCMA", 8000, 1}, 8},
+ {{"G722", 8000, 1}, 9},
+ {{"L16", 44100, 2}, 10},
+ {{"L16", 44100, 1}, 11},
+ {{"QCELP", 8000, 1}, 12},
+ {{"CN", 8000, 1}, 13},
+ // RFC 4566 is a bit ambiguous on the contents of the "encoding
+ // parameters" field, which, for audio, encodes the number of
+ // channels. It is "optional and may be omitted if the number of
+ // channels is one". Does that necessarily imply that an omitted
+ // encoding parameter means one channel? Since RFC 3551 doesn't
+ // specify a value for this parameter for MPA, I've included both 0
+ // and 1 here, to increase the chances it will be correctly used if
+ // someone implements an MPEG audio encoder/decoder.
+ {{"MPA", 90000, 0}, 14},
+ {{"MPA", 90000, 1}, 14},
+ {{"G728", 8000, 1}, 15},
+ {{"DVI4", 11025, 1}, 16},
+ {{"DVI4", 22050, 1}, 17},
+ {{"G729", 8000, 1}, 18},
+
+ // Payload type assignments currently used by WebRTC.
+ // Includes data to reduce collisions (and thus reassignments)
+ {{kGoogleRtpDataCodecName, 0, 0}, kGoogleRtpDataCodecPlType},
+ {{kIlbcCodecName, 8000, 1}, 102},
+ {{kIsacCodecName, 16000, 1}, 103},
+ {{kIsacCodecName, 32000, 1}, 104},
+ {{kCnCodecName, 16000, 1}, 105},
+ {{kCnCodecName, 32000, 1}, 106},
+ {{kGoogleSctpDataCodecName, 0, 0}, kGoogleSctpDataCodecPlType},
+ {{kOpusCodecName, 48000, 2,
+ {{"minptime", "10"}, {"useinbandfec", "1"}}}, 111},
+ // TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we
+ // assign payload types dynamically for send side as well.
+ {{kDtmfCodecName, 48000, 1}, 110},
+ {{kDtmfCodecName, 32000, 1}, 112},
+ {{kDtmfCodecName, 16000, 1}, 113},
+ {{kDtmfCodecName, 8000, 1}, 126}}) {
+ // TODO(ossu): Try to keep this as change-proof as possible until we're able
+ // to remove the payload type constants from everywhere in the code.
+ for (const auto& mapping : mappings_) {
+ used_payload_types_.insert(mapping.second);
+ }
+}
+
+PayloadTypeMapper::~PayloadTypeMapper() = default;
+
+rtc::Optional<int> PayloadTypeMapper::GetMappingFor(
+ const webrtc::SdpAudioFormat& format) {
+ auto iter = mappings_.find(format);
+ if (iter != mappings_.end())
+ return iter->second;
+
+ for (; next_unused_payload_type_ <= max_payload_type_;
+ ++next_unused_payload_type_) {
+ int payload_type = next_unused_payload_type_;
+ if (used_payload_types_.find(payload_type) == used_payload_types_.end()) {
+ used_payload_types_.insert(payload_type);
+ mappings_[format] = payload_type;
+ ++next_unused_payload_type_;
+ return payload_type;
+ }
+ }
+
+ return rtc::nullopt;
+}
+
+rtc::Optional<int> PayloadTypeMapper::FindMappingFor(
+ const webrtc::SdpAudioFormat& format) const {
+ auto iter = mappings_.find(format);
+ if (iter != mappings_.end())
+ return iter->second;
+
+ return rtc::nullopt;
+}
+
+rtc::Optional<AudioCodec> PayloadTypeMapper::ToAudioCodec(
+ const webrtc::SdpAudioFormat& format) {
+ // TODO(ossu): We can safely set bitrate to zero here, since that field is
+ // not presented in the SDP. It is used to ferry around some target bitrate
+ // values for certain codecs (ISAC and Opus) and in ways it really
+ // shouldn't. It should be removed once we no longer use CodecInsts in the
+ // ACM or NetEq.
+ auto opt_payload_type = GetMappingFor(format);
+ if (opt_payload_type) {
+ AudioCodec codec(*opt_payload_type, format.name, format.clockrate_hz, 0,
+ format.num_channels);
+ codec.params = format.parameters;
+ return std::move(codec);
+ }
+
+ return rtc::nullopt;
+}
+
+bool PayloadTypeMapper::SdpAudioFormatOrdering::operator()(
+ const webrtc::SdpAudioFormat& a,
+ const webrtc::SdpAudioFormat& b) const {
+ if (a.clockrate_hz == b.clockrate_hz) {
+ if (a.num_channels == b.num_channels) {
+ int name_cmp = STR_CASE_CMP(a.name.c_str(), b.name.c_str());
+ if (name_cmp == 0)
+ return a.parameters < b.parameters;
+ return name_cmp < 0;
+ }
+ return a.num_channels < b.num_channels;
+ }
+ return a.clockrate_hz < b.clockrate_hz;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.h b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.h
new file mode 100644
index 0000000000..914c08c97b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_
+#define MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_
+
+#include <map>
+#include <set>
+
+#include "api/audio_codecs/audio_format.h"
+#include "api/optional.h"
+#include "media/base/codec.h"
+
+namespace cricket {
+
+webrtc::SdpAudioFormat AudioCodecToSdpAudioFormat(const AudioCodec& ac);
+
+class PayloadTypeMapper {
+ public:
+ PayloadTypeMapper();
+ ~PayloadTypeMapper();
+
+ // Finds the current payload type for |format| or assigns a new one, if no
+ // current mapping exists. Will return an empty value if it was unable to
+ // create a mapping, i.e. if all dynamic payload type ids have been used up.
+ rtc::Optional<int> GetMappingFor(const webrtc::SdpAudioFormat& format);
+
+ // Finds the current payload type for |format|, if any. Returns an empty value
+ // if no payload type mapping exists for the format.
+ rtc::Optional<int> FindMappingFor(const webrtc::SdpAudioFormat& format) const;
+
+ // Like GetMappingFor, but fills in an AudioCodec structure with the necessary
+ // information instead.
+ rtc::Optional<AudioCodec> ToAudioCodec(const webrtc::SdpAudioFormat& format);
+
+ private:
+ struct SdpAudioFormatOrdering {
+ bool operator()(const webrtc::SdpAudioFormat& a,
+ const webrtc::SdpAudioFormat& b) const;
+ };
+
+ int next_unused_payload_type_;
+ int max_payload_type_;
+ std::map<webrtc::SdpAudioFormat, int, SdpAudioFormatOrdering> mappings_;
+ std::set<int> used_payload_types_;
+};
+
+} // namespace cricket
+#endif // MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper_unittest.cc
new file mode 100644
index 0000000000..96d56c28d2
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper_unittest.cc
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <set>
+#include <string>
+
+#include "media/engine/payload_type_mapper.h"
+#include "test/gtest.h"
+
+namespace cricket {
+
+class PayloadTypeMapperTest : public testing::Test {
+ protected:
+ PayloadTypeMapper mapper_;
+};
+
+TEST_F(PayloadTypeMapperTest, StaticPayloadTypes) {
+ EXPECT_EQ(0, mapper_.FindMappingFor({"pcmu", 8000, 1}));
+ EXPECT_EQ(3, mapper_.FindMappingFor({"gsm", 8000, 1}));
+ EXPECT_EQ(4, mapper_.FindMappingFor({"g723", 8000, 1}));
+ EXPECT_EQ(5, mapper_.FindMappingFor({"dvi4", 8000, 1}));
+ EXPECT_EQ(6, mapper_.FindMappingFor({"dvi4", 16000, 1}));
+ EXPECT_EQ(7, mapper_.FindMappingFor({"lpc", 8000, 1}));
+ EXPECT_EQ(8, mapper_.FindMappingFor({"pcma", 8000, 1}));
+ EXPECT_EQ(9, mapper_.FindMappingFor({"g722", 8000, 1}));
+ EXPECT_EQ(10, mapper_.FindMappingFor({"l16", 44100, 2}));
+ EXPECT_EQ(11, mapper_.FindMappingFor({"l16", 44100, 1}));
+ EXPECT_EQ(12, mapper_.FindMappingFor({"qcelp", 8000, 1}));
+ EXPECT_EQ(13, mapper_.FindMappingFor({"cn", 8000, 1}));
+ EXPECT_EQ(14, mapper_.FindMappingFor({"mpa", 90000, 0}));
+ EXPECT_EQ(14, mapper_.FindMappingFor({"mpa", 90000, 1}));
+ EXPECT_EQ(15, mapper_.FindMappingFor({"g728", 8000, 1}));
+ EXPECT_EQ(16, mapper_.FindMappingFor({"dvi4", 11025, 1}));
+ EXPECT_EQ(17, mapper_.FindMappingFor({"dvi4", 22050, 1}));
+ EXPECT_EQ(18, mapper_.FindMappingFor({"g729", 8000, 1}));
+}
+
+TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) {
+ // Tests that the payload mapper knows about the audio and data formats we've
+ // been using in WebRTC, with their hard coded values.
+ auto data_mapping = [this] (const char *name) {
+ return mapper_.FindMappingFor({name, 0, 0});
+ };
+ EXPECT_EQ(kGoogleRtpDataCodecPlType, data_mapping(kGoogleRtpDataCodecName));
+ EXPECT_EQ(kGoogleSctpDataCodecPlType, data_mapping(kGoogleSctpDataCodecName));
+
+ EXPECT_EQ(102, mapper_.FindMappingFor({kIlbcCodecName, 8000, 1}));
+ EXPECT_EQ(103, mapper_.FindMappingFor({kIsacCodecName, 16000, 1}));
+ EXPECT_EQ(104, mapper_.FindMappingFor({kIsacCodecName, 32000, 1}));
+ EXPECT_EQ(105, mapper_.FindMappingFor({kCnCodecName, 16000, 1}));
+ EXPECT_EQ(106, mapper_.FindMappingFor({kCnCodecName, 32000, 1}));
+ EXPECT_EQ(111, mapper_.FindMappingFor({kOpusCodecName, 48000, 2,
+ {{"minptime", "10"}, {"useinbandfec", "1"}}}));
+ // TODO(solenberg): Remove 16k, 32k, 48k DTMF checks once these payload types
+ // are dynamically assigned.
+ EXPECT_EQ(110, mapper_.FindMappingFor({kDtmfCodecName, 48000, 1}));
+ EXPECT_EQ(112, mapper_.FindMappingFor({kDtmfCodecName, 32000, 1}));
+ EXPECT_EQ(113, mapper_.FindMappingFor({kDtmfCodecName, 16000, 1}));
+ EXPECT_EQ(126, mapper_.FindMappingFor({kDtmfCodecName, 8000, 1}));
+}
+
+TEST_F(PayloadTypeMapperTest, ValidDynamicPayloadTypes) {
+ // RFC 3551 says:
+ // "This profile reserves payload type numbers in the range 96-127
+ // exclusively for dynamic assignment. Applications SHOULD first use
+ // values in this range for dynamic payload types. Those applications
+ // which need to define more than 32 dynamic payload types MAY bind
+ // codes below 96, in which case it is RECOMMENDED that unassigned
+ // payload type numbers be used first. However, the statically assigned
+ // payload types are default bindings and MAY be dynamically bound to
+ // new encodings if needed."
+
+ // Tests that the payload mapper uses values in the dynamic payload type range
+ // (96 - 127) before any others and that the values returned are all valid.
+ bool has_been_below_96 = false;
+ std::set<int> used_payload_types;
+ for (int i = 0; i != 256; ++i) {
+ std::string format_name = "unknown_format_" + std::to_string(i);
+ webrtc::SdpAudioFormat format(format_name.c_str(), i*100, (i % 2) + 1);
+ auto opt_payload_type = mapper_.GetMappingFor(format);
+ bool mapper_is_full = false;
+
+ // There's a limited number of slots for payload types. We're fine with not
+ // being able to map them all.
+ if (opt_payload_type) {
+ int payload_type = *opt_payload_type;
+ EXPECT_FALSE(mapper_is_full) << "Mapping should not fail sporadically";
+ EXPECT_EQ(used_payload_types.find(payload_type), used_payload_types.end())
+ << "Payload types must not be reused";
+ used_payload_types.insert(payload_type);
+ EXPECT_GE(payload_type, 0) << "Negative payload types are invalid";
+ EXPECT_LE(payload_type, 127) << "Payload types above 127 are invalid";
+ EXPECT_FALSE(payload_type >= 96 && has_been_below_96);
+ if (payload_type < 96)
+ has_been_below_96 = true;
+
+ EXPECT_EQ(payload_type, mapper_.FindMappingFor(format))
+ << "Mapping must be permanent after successful call to "
+ "GetMappingFor";
+ EXPECT_EQ(payload_type, mapper_.GetMappingFor(format))
+ << "Subsequent calls to GetMappingFor must return the same value";
+ } else {
+ mapper_is_full = true;
+ }
+ }
+
+ // Also, we must've been able to map at least one dynamic payload type.
+ EXPECT_FALSE(used_payload_types.empty())
+ << "Mapper must support at least one user-defined payload type";
+}
+
+TEST_F(PayloadTypeMapperTest, ToAudioCodec) {
+ webrtc::SdpAudioFormat format("unknown_format", 4711, 17);
+ auto opt_payload_type = mapper_.GetMappingFor(format);
+ EXPECT_TRUE(opt_payload_type);
+ auto opt_audio_codec = mapper_.ToAudioCodec(format);
+ EXPECT_TRUE(opt_audio_codec);
+
+ if (opt_payload_type && opt_audio_codec) {
+ int payload_type = *opt_payload_type;
+ const AudioCodec& codec = *opt_audio_codec;
+
+ EXPECT_EQ(codec.id, payload_type);
+ EXPECT_EQ(codec.name, format.name);
+ EXPECT_EQ(codec.clockrate, format.clockrate_hz);
+ EXPECT_EQ(codec.channels, format.num_channels);
+ EXPECT_EQ(codec.params, format.parameters);
+ }
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.cc b/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.cc
new file mode 100644
index 0000000000..a9f85378ab
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/scopedvideodecoder.h"
+
+#include <vector>
+
+#include "api/video_codecs/video_decoder.h"
+
+namespace cricket {
+
+namespace {
+
+class ScopedVideoDecoder : public webrtc::VideoDecoder {
+ public:
+ ScopedVideoDecoder(WebRtcVideoDecoderFactory* factory,
+ webrtc::VideoDecoder* decoder);
+
+ int32_t InitDecode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores) override;
+ int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t Decode(const webrtc::EncodedImage& input_image,
+ bool missing_frames,
+ const webrtc::RTPFragmentationHeader* fragmentation,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ int64_t render_time_ms) override;
+ bool PrefersLateDecoding() const override;
+ const char* ImplementationName() const override;
+
+ ~ScopedVideoDecoder() override;
+
+ private:
+ WebRtcVideoDecoderFactory* factory_;
+ webrtc::VideoDecoder* decoder_;
+};
+
+ScopedVideoDecoder::ScopedVideoDecoder(WebRtcVideoDecoderFactory* factory,
+ webrtc::VideoDecoder* decoder)
+ : factory_(factory), decoder_(decoder) {}
+
+int32_t ScopedVideoDecoder::InitDecode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores) {
+ return decoder_->InitDecode(codec_settings, number_of_cores);
+}
+
+int32_t ScopedVideoDecoder::RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* callback) {
+ return decoder_->RegisterDecodeCompleteCallback(callback);
+}
+
+int32_t ScopedVideoDecoder::Release() {
+ return decoder_->Release();
+}
+
+int32_t ScopedVideoDecoder::Decode(
+ const webrtc::EncodedImage& input_image,
+ bool missing_frames,
+ const webrtc::RTPFragmentationHeader* fragmentation,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ int64_t render_time_ms) {
+ return decoder_->Decode(input_image, missing_frames, fragmentation,
+ codec_specific_info, render_time_ms);
+}
+
+bool ScopedVideoDecoder::PrefersLateDecoding() const {
+ return decoder_->PrefersLateDecoding();
+}
+
+const char* ScopedVideoDecoder::ImplementationName() const {
+ return decoder_->ImplementationName();
+}
+
+ScopedVideoDecoder::~ScopedVideoDecoder() {
+ factory_->DestroyVideoDecoder(decoder_);
+}
+
+} // namespace
+
+std::unique_ptr<webrtc::VideoDecoder> CreateScopedVideoDecoder(
+ WebRtcVideoDecoderFactory* factory,
+ const VideoCodec& codec,
+ VideoDecoderParams params) {
+ webrtc::VideoDecoder* decoder =
+ factory->CreateVideoDecoderWithParams(codec, params);
+ if (!decoder)
+ return nullptr;
+ return std::unique_ptr<webrtc::VideoDecoder>(
+ new ScopedVideoDecoder(factory, decoder));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.h b/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.h
new file mode 100644
index 0000000000..983059b9a4
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_SCOPEDVIDEODECODER_H_
+#define MEDIA_ENGINE_SCOPEDVIDEODECODER_H_
+
+#include <memory>
+
+#include "media/engine/webrtcvideodecoderfactory.h"
+
+namespace cricket {
+
+// Helper function that creates a webrtc::VideoDecoder held by an
+// std::unique_ptr instead of having to be deleted through
+// WebRtcVideoDecoderFactory::DestroyVideoDecoder. The factory passed in must
+// outlive the returned encoder.
+// TODO(andersc): This helper function will be deleted once
+// cricket::WebRtcVideoDecoderFactory is deprecated, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=7925 for more info.
+std::unique_ptr<webrtc::VideoDecoder> CreateScopedVideoDecoder(
+ cricket::WebRtcVideoDecoderFactory* factory,
+ const VideoCodec& codec,
+ VideoDecoderParams params);
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_SCOPEDVIDEODECODER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.cc b/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.cc
new file mode 100644
index 0000000000..0f563641f6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/scopedvideoencoder.h"
+
+#include <vector>
+
+#include "api/video_codecs/video_encoder.h"
+
+namespace cricket {
+
+namespace {
+
+class ScopedVideoEncoder : public webrtc::VideoEncoder {
+ public:
+ ScopedVideoEncoder(WebRtcVideoEncoderFactory* factory,
+ webrtc::VideoEncoder* encoder);
+
+ int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size) override;
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t Encode(const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ const std::vector<webrtc::FrameType>* frame_types) override;
+ int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
+ int32_t SetRates(uint32_t bitrate, uint32_t framerate) override;
+ int32_t SetRateAllocation(const webrtc::BitrateAllocation& allocation,
+ uint32_t framerate) override;
+ ScalingSettings GetScalingSettings() const override;
+ int32_t SetPeriodicKeyFrames(bool enable) override;
+ bool SupportsNativeHandle() const override;
+ const char* ImplementationName() const override;
+
+ ~ScopedVideoEncoder() override;
+
+ private:
+ WebRtcVideoEncoderFactory* factory_;
+ webrtc::VideoEncoder* encoder_;
+};
+
+ScopedVideoEncoder::ScopedVideoEncoder(WebRtcVideoEncoderFactory* factory,
+ webrtc::VideoEncoder* encoder)
+ : factory_(factory), encoder_(encoder) {}
+
+int32_t ScopedVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size) {
+ return encoder_->InitEncode(codec_settings, number_of_cores,
+ max_payload_size);
+}
+
+int32_t ScopedVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ return encoder_->RegisterEncodeCompleteCallback(callback);
+}
+
+int32_t ScopedVideoEncoder::Release() {
+ return encoder_->Release();
+}
+
+int32_t ScopedVideoEncoder::Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ const std::vector<webrtc::FrameType>* frame_types) {
+ return encoder_->Encode(frame, codec_specific_info, frame_types);
+}
+
+int32_t ScopedVideoEncoder::SetChannelParameters(uint32_t packet_loss,
+ int64_t rtt) {
+ return encoder_->SetChannelParameters(packet_loss, rtt);
+}
+
+int32_t ScopedVideoEncoder::SetRates(uint32_t bitrate, uint32_t framerate) {
+ return encoder_->SetRates(bitrate, framerate);
+}
+
+int32_t ScopedVideoEncoder::SetRateAllocation(
+ const webrtc::BitrateAllocation& allocation,
+ uint32_t framerate) {
+ return encoder_->SetRateAllocation(allocation, framerate);
+}
+
+webrtc::VideoEncoder::ScalingSettings ScopedVideoEncoder::GetScalingSettings()
+ const {
+ return encoder_->GetScalingSettings();
+}
+
+int32_t ScopedVideoEncoder::SetPeriodicKeyFrames(bool enable) {
+ return encoder_->SetPeriodicKeyFrames(enable);
+}
+
+bool ScopedVideoEncoder::SupportsNativeHandle() const {
+ return encoder_->SupportsNativeHandle();
+}
+
+const char* ScopedVideoEncoder::ImplementationName() const {
+ return encoder_->ImplementationName();
+}
+
+ScopedVideoEncoder::~ScopedVideoEncoder() {
+ factory_->DestroyVideoEncoder(encoder_);
+}
+
+} // namespace
+
+std::unique_ptr<webrtc::VideoEncoder> CreateScopedVideoEncoder(
+ WebRtcVideoEncoderFactory* factory,
+ const VideoCodec& codec) {
+ webrtc::VideoEncoder* encoder = factory->CreateVideoEncoder(codec);
+ if (!encoder)
+ return nullptr;
+ return std::unique_ptr<webrtc::VideoEncoder>(
+ new ScopedVideoEncoder(factory, encoder));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.h b/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.h
new file mode 100644
index 0000000000..ac452d7e6c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_SCOPEDVIDEOENCODER_H_
+#define MEDIA_ENGINE_SCOPEDVIDEOENCODER_H_
+
+#include <memory>
+
+#include "media/engine/webrtcvideoencoderfactory.h"
+
+namespace cricket {
+
+// Helper function that creates a webrtc::VideoEncoder held by an
+// std::unique_ptr instead of having to be deleted through
+// WebRtcVideoEncoderFactory::DestroyVideoEncoder. The factory passed in must
+// outlive the returned encoder.
+// TODO(magjed): This helper function will be deleted once
+// cricket::WebRtcVideoEncoderFactory is deprecated, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=7925 for more info.
+std::unique_ptr<webrtc::VideoEncoder> CreateScopedVideoEncoder(
+ cricket::WebRtcVideoEncoderFactory* factory,
+ const VideoCodec& codec);
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_SCOPEDVIDEOENCODER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast.cc b/third_party/libwebrtc/webrtc/media/engine/simulcast.cc
new file mode 100644
index 0000000000..4ec4afa71f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast.cc
@@ -0,0 +1,338 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <algorithm>
+#include <string>
+
+#include "media/base/streamparams.h"
+#include "media/engine/constants.h"
+#include "media/engine/simulcast.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace cricket {
+
+struct SimulcastFormat {
+ int width;
+ int height;
+ // The maximum number of simulcast layers can be used for
+  // resolutions at |widthxheight|.
+ size_t max_layers;
+ // The maximum bitrate for encoding stream at |widthxheight|, when we are
+ // not sending the next higher spatial stream.
+ int max_bitrate_kbps;
+ // The target bitrate for encoding stream at |widthxheight|, when this layer
+ // is not the highest layer (i.e., when we are sending another higher spatial
+ // stream).
+ int target_bitrate_kbps;
+ // The minimum bitrate needed for encoding stream at |widthxheight|.
+ int min_bitrate_kbps;
+};
+
+// These tables describe from which resolution we can use how many
+// simulcast layers at what bitrates (maximum, target, and minimum).
+// Important!! Keep this table from high resolution to low resolution.
+const SimulcastFormat kSimulcastFormats[] = {
+ {1920, 1080, 3, 5000, 4000, 800},
+ {1280, 720, 3, 2500, 2500, 600},
+ {960, 540, 3, 900, 900, 450},
+ {640, 360, 2, 700, 500, 150},
+ {480, 270, 2, 450, 350, 150},
+ {320, 180, 1, 200, 150, 30},
+ {0, 0, 1, 200, 150, 30}
+};
+
+const int kMaxScreenshareSimulcastStreams = 2;
+
+// Multiway: Number of temporal layers for each simulcast stream, for maximum
+// possible number of simulcast streams |kMaxSimulcastStreams|. The array
+// goes from lowest resolution at position 0 to highest resolution.
+// For example, first three elements correspond to say: QVGA, VGA, WHD.
+static const int
+ kDefaultConferenceNumberOfTemporalLayers[webrtc::kMaxSimulcastStreams] =
+ {3, 3, 3, 3};
+
+void GetSimulcastSsrcs(const StreamParams& sp, std::vector<uint32_t>* ssrcs) {
+ const SsrcGroup* sim_group = sp.get_ssrc_group(kSimSsrcGroupSemantics);
+ if (sim_group) {
+ ssrcs->insert(
+ ssrcs->end(), sim_group->ssrcs.begin(), sim_group->ssrcs.end());
+ }
+}
+
+void MaybeExchangeWidthHeight(int* width, int* height) {
+ // |kSimulcastFormats| assumes |width| >= |height|. If not, exchange them
+ // before comparing.
+ if (*width < *height) {
+ int temp = *width;
+ *width = *height;
+ *height = temp;
+ }
+}
+
+int FindSimulcastFormatIndex(int width, int height) {
+ MaybeExchangeWidthHeight(&width, &height);
+
+ for (uint32_t i = 0; i < arraysize(kSimulcastFormats); ++i) {
+ if (width * height >=
+ kSimulcastFormats[i].width * kSimulcastFormats[i].height) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+int FindSimulcastFormatIndex(int width, int height, size_t max_layers) {
+ MaybeExchangeWidthHeight(&width, &height);
+
+ for (uint32_t i = 0; i < arraysize(kSimulcastFormats); ++i) {
+ if (width * height >=
+ kSimulcastFormats[i].width * kSimulcastFormats[i].height &&
+ max_layers == kSimulcastFormats[i].max_layers) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+// Simulcast stream width and height must both be divisible by
+// |2 ^ (simulcast_layers - 1)|.
+int NormalizeSimulcastSize(int size, size_t simulcast_layers) {
+ const int base2_exponent = static_cast<int>(simulcast_layers) - 1;
+ return ((size >> base2_exponent) << base2_exponent);
+}
+
+size_t FindSimulcastMaxLayers(int width, int height) {
+ int index = FindSimulcastFormatIndex(width, height);
+ if (index == -1) {
+ return -1;
+ }
+ return kSimulcastFormats[index].max_layers;
+}
+
+// TODO(marpan): Investigate if we should return 0 instead of -1 in
+// FindSimulcast[Max/Target/Min]Bitrate functions below, since the
+// codec struct max/min/targetBitrates are unsigned.
+int FindSimulcastMaxBitrateBps(int width, int height) {
+ const int format_index = FindSimulcastFormatIndex(width, height);
+ if (format_index == -1) {
+ return -1;
+ }
+ return kSimulcastFormats[format_index].max_bitrate_kbps * 1000;
+}
+
+int FindSimulcastTargetBitrateBps(int width, int height) {
+ const int format_index = FindSimulcastFormatIndex(width, height);
+ if (format_index == -1) {
+ return -1;
+ }
+ return kSimulcastFormats[format_index].target_bitrate_kbps * 1000;
+}
+
+int FindSimulcastMinBitrateBps(int width, int height) {
+ const int format_index = FindSimulcastFormatIndex(width, height);
+ if (format_index == -1) {
+ return -1;
+ }
+ return kSimulcastFormats[format_index].min_bitrate_kbps * 1000;
+}
+
+bool SlotSimulcastMaxResolution(size_t max_layers, int* width, int* height) {
+ int index = FindSimulcastFormatIndex(*width, *height, max_layers);
+ if (index == -1) {
+ RTC_LOG(LS_ERROR) << "SlotSimulcastMaxResolution";
+ return false;
+ }
+
+ *width = kSimulcastFormats[index].width;
+ *height = kSimulcastFormats[index].height;
+ RTC_LOG(LS_INFO) << "SlotSimulcastMaxResolution to width:" << *width
+ << " height:" << *height;
+ return true;
+}
+
+int GetTotalMaxBitrateBps(const std::vector<webrtc::VideoStream>& streams) {
+ int total_max_bitrate_bps = 0;
+ for (size_t s = 0; s < streams.size() - 1; ++s) {
+ total_max_bitrate_bps += streams[s].target_bitrate_bps;
+ }
+ total_max_bitrate_bps += streams.back().max_bitrate_bps;
+ return total_max_bitrate_bps;
+}
+
+// Builds the per-stream webrtc::VideoStream configuration for a simulcast
+// send of a |width| x |height| source, capped at |max_streams| streams.
+// Screencast content uses the legacy screenshare layout (plus an optional
+// upper simulcast layer when the field trial enables it); camera content
+// derives per-layer resolutions and bitrates from the simulcast format
+// tables. Returns an empty vector when the resolution cannot be matched to
+// the requested stream count.
+std::vector<webrtc::VideoStream> GetSimulcastConfig(size_t max_streams,
+                                                    int width,
+                                                    int height,
+                                                    int max_bitrate_bps,
+                                                    int max_qp,
+                                                    int max_framerate,
+                                                    bool is_screencast) {
+  size_t num_simulcast_layers;
+  if (is_screencast) {
+    if (UseSimulcastScreenshare()) {
+      num_simulcast_layers =
+          std::min<int>(max_streams, kMaxScreenshareSimulcastStreams);
+    } else {
+      // Legacy screenshare: a single (temporal-layered) stream.
+      num_simulcast_layers = 1;
+    }
+  } else {
+    num_simulcast_layers = FindSimulcastMaxLayers(width, height);
+  }
+
+  if (num_simulcast_layers > max_streams) {
+    // If the number of SSRCs in the group differs from our target
+    // number of simulcast streams for current resolution, switch down
+    // to a resolution that matches our number of SSRCs.
+    if (!SlotSimulcastMaxResolution(max_streams, &width, &height)) {
+      return std::vector<webrtc::VideoStream>();
+    }
+    num_simulcast_layers = max_streams;
+  }
+  std::vector<webrtc::VideoStream> streams;
+  streams.resize(num_simulcast_layers);
+
+  if (is_screencast) {
+    ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
+    // For legacy screenshare in conference mode, tl0 and tl1 bitrates are
+    // piggybacked on the VideoCodec struct as target and max bitrates,
+    // respectively. See eg. webrtc::VP8EncoderImpl::SetRates().
+    streams[0].width = width;
+    streams[0].height = height;
+    streams[0].max_qp = max_qp;
+    streams[0].max_framerate = 5;
+    streams[0].min_bitrate_bps = kMinVideoBitrateBps;
+    streams[0].target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
+    streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
+    streams[0].temporal_layer_thresholds_bps.clear();
+    streams[0].temporal_layer_thresholds_bps.push_back(config.tl0_bitrate_kbps *
+                                                       1000);
+
+    // With simulcast enabled, add another spatial layer. This one will have a
+    // more normal layout, with the regular 3 temporal layer pattern and no fps
+    // restrictions. The base simulcast stream will still use legacy setup.
+    if (num_simulcast_layers == kMaxScreenshareSimulcastStreams) {
+      // Add optional upper simulcast layer.
+      // Lowest temporal layers of a 3 layer setup will have 40% of the total
+      // bitrate allocation for that stream. Make sure the gap between the
+      // target of the lower stream and first temporal layer of the higher one
+      // is at most 2x the bitrate, so that upswitching is not hampered by
+      // stalled bitrate estimates.
+      // NOTE(review): this local deliberately shadows the |max_bitrate_bps|
+      // parameter for the remainder of this scope.
+      int max_bitrate_bps = 2 * ((streams[0].target_bitrate_bps * 10) / 4);
+      // Cap max bitrate so it isn't overly high for the given resolution.
+      max_bitrate_bps = std::min<int>(
+          max_bitrate_bps, FindSimulcastMaxBitrateBps(width, height));
+
+      streams[1].width = width;
+      streams[1].height = height;
+      streams[1].max_qp = max_qp;
+      streams[1].max_framerate = max_framerate;
+      // Three temporal layers means two thresholds.
+      streams[1].temporal_layer_thresholds_bps.resize(2);
+      streams[1].min_bitrate_bps = streams[0].target_bitrate_bps * 2;
+      streams[1].target_bitrate_bps = max_bitrate_bps;
+      streams[1].max_bitrate_bps = max_bitrate_bps;
+    }
+  } else {
+    // Format width and height has to be divisible by 2^(number of layers - 1)
+    // so that each halving below yields integral dimensions.
+    width = NormalizeSimulcastSize(width, num_simulcast_layers);
+    height = NormalizeSimulcastSize(height, num_simulcast_layers);
+
+    // Add simulcast streams, from highest resolution (|s| = num layers - 1)
+    // down to lowest resolution at |s| = 0, halving the dimensions per step.
+    for (size_t s = num_simulcast_layers - 1;; --s) {
+      streams[s].width = width;
+      streams[s].height = height;
+      // TODO(pbos): Fill actual temporal-layer bitrate thresholds.
+      streams[s].max_qp = max_qp;
+      streams[s].temporal_layer_thresholds_bps.resize(
+          kDefaultConferenceNumberOfTemporalLayers[s] - 1);
+      streams[s].max_bitrate_bps = FindSimulcastMaxBitrateBps(width, height);
+      streams[s].target_bitrate_bps =
+          FindSimulcastTargetBitrateBps(width, height);
+      streams[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height);
+      streams[s].max_framerate = max_framerate;
+
+      width /= 2;
+      height /= 2;
+
+      // |s| is unsigned; break before the decrement would wrap around.
+      if (s == 0)
+        break;
+    }
+
+    // Spend additional bits to boost the max stream.
+    int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams);
+    if (bitrate_left_bps > 0) {
+      streams.back().max_bitrate_bps += bitrate_left_bps;
+    }
+  }
+
+  return streams;
+}
+
+// Valid range (kbps) for the screenshare temporal-layer rates parsed from the
+// field trial, and the defaults used when the trial is absent or malformed.
+static const int kScreenshareMinBitrateKbps = 50;
+static const int kScreenshareMaxBitrateKbps = 6000;
+static const int kScreenshareDefaultTl0BitrateKbps = 200;
+static const int kScreenshareDefaultTl1BitrateKbps = 1000;
+
+// Field trial names consulted by GetDefault() and UseSimulcastScreenshare().
+static const char* kScreencastLayerFieldTrialName =
+    "WebRTC-ScreenshareLayerRates";
+static const char* kSimulcastScreenshareFieldTrialName =
+    "WebRTC-SimulcastScreenshare";
+
+// Stores the temporal layer 0/1 bitrates (kbps) verbatim; no validation here
+// (FromFieldTrialGroup performs the range checks).
+ScreenshareLayerConfig::ScreenshareLayerConfig(int tl0_bitrate, int tl1_bitrate)
+    : tl0_bitrate_kbps(tl0_bitrate), tl1_bitrate_kbps(tl1_bitrate) {
+}
+
+// Returns the default screenshare layer rates, optionally overridden by the
+// "WebRTC-ScreenshareLayerRates" field trial. A malformed trial group is
+// logged and ignored, so the defaults remain in effect.
+ScreenshareLayerConfig ScreenshareLayerConfig::GetDefault() {
+  ScreenshareLayerConfig config(kScreenshareDefaultTl0BitrateKbps,
+                                kScreenshareDefaultTl1BitrateKbps);
+
+  const std::string group =
+      webrtc::field_trial::FindFullName(kScreencastLayerFieldTrialName);
+  if (group.empty()) {
+    return config;
+  }
+
+  if (!FromFieldTrialGroup(group, &config)) {
+    RTC_LOG(LS_WARNING) << "Unable to parse WebRTC-ScreenshareLayerRates"
+                           " field trial group: '"
+                        << group << "'.";
+  }
+  return config;
+}
+
+// Parses "<tl0>-<tl1>" (both in kbps) from the field trial |group| string
+// into |config|. Returns false — leaving |config| untouched — when the string
+// does not match the format or the rates fail the sanity checks.
+bool ScreenshareLayerConfig::FromFieldTrialGroup(
+    const std::string& group,
+    ScreenshareLayerConfig* config) {
+  int tl0_kbps = 0;
+  int tl1_kbps = 0;
+  if (sscanf(group.c_str(), "%d-%d", &tl0_kbps, &tl1_kbps) != 2) {
+    return false;
+  }
+
+  // Sanity check: both rates within [min, max] and tl0 not above tl1.
+  const bool tl0_in_range = tl0_kbps >= kScreenshareMinBitrateKbps &&
+                            tl0_kbps <= kScreenshareMaxBitrateKbps;
+  const bool tl1_in_range = tl1_kbps >= kScreenshareMinBitrateKbps &&
+                            tl1_kbps <= kScreenshareMaxBitrateKbps;
+  if (!tl0_in_range || !tl1_in_range || tl0_kbps > tl1_kbps) {
+    return false;
+  }
+
+  config->tl0_bitrate_kbps = tl0_kbps;
+  config->tl1_bitrate_kbps = tl1_kbps;
+  return true;
+}
+
+// True when the "WebRTC-SimulcastScreenshare" field trial is enabled.
+bool UseSimulcastScreenshare() {
+  return webrtc::field_trial::IsEnabled(kSimulcastScreenshareFieldTrialName);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast.h b/third_party/libwebrtc/webrtc/media/engine/simulcast.h
new file mode 100644
index 0000000000..84f8c31a89
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_SIMULCAST_H_
+#define MEDIA_ENGINE_SIMULCAST_H_
+
+#include <string>
+#include <vector>
+
+#include "call/video_config.h"
+#include "rtc_base/basictypes.h"
+
+namespace cricket {
+struct StreamParams;
+
+// TODO(sprang): Remove this, as we're moving away from temporal layer mode.
+// Config for use with screen cast when temporal layers are enabled.
+struct ScreenshareLayerConfig {
+ public:
+  // Stores the two rates verbatim; validation happens in FromFieldTrialGroup.
+  ScreenshareLayerConfig(int tl0_bitrate, int tl1_bitrate);
+
+  // Bitrates, for temporal layers 0 and 1.
+  int tl0_bitrate_kbps;
+  int tl1_bitrate_kbps;
+
+  // Default rates, possibly overridden by the
+  // "WebRTC-ScreenshareLayerRates" field trial.
+  static ScreenshareLayerConfig GetDefault();
+
+  // Parse bitrate from group name on format "(tl0_bitrate)-(tl1_bitrate)",
+  // eg. "100-1000" for the default rates.
+  static bool FromFieldTrialGroup(const std::string& group,
+                                  ScreenshareLayerConfig* config);
+};
+
+// TODO(pthatcher): Write unit tests just for these functions,
+// independent of WebrtcVideoEngine.
+
+// Total bitrate (bps) needed to saturate all layers: sum of target bitrates
+// of the lower streams plus the max bitrate of the highest stream.
+int GetTotalMaxBitrateBps(const std::vector<webrtc::VideoStream>& streams);
+
+// Get the ssrcs of the SIM group from the stream params.
+void GetSimulcastSsrcs(const StreamParams& sp, std::vector<uint32_t>* ssrcs);
+
+// Get simulcast settings.
+// TODO(sprang): Remove default parameter when it's no longer referenced.
+std::vector<webrtc::VideoStream> GetSimulcastConfig(size_t max_streams,
+                                                    int width,
+                                                    int height,
+                                                    int max_bitrate_bps,
+                                                    int max_qp,
+                                                    int max_framerate,
+                                                    bool is_screencast = false);
+
+// True when the "WebRTC-SimulcastScreenshare" field trial is enabled.
+bool UseSimulcastScreenshare();
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_SIMULCAST_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.cc b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.cc
new file mode 100644
index 0000000000..02479db27d
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.cc
@@ -0,0 +1,562 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/simulcast_encoder_adapter.h"
+
+#include <algorithm>
+
+// NOTE(ajm): Path provided by gyp.
+#include "libyuv/scale.h" // NOLINT
+
+#include "api/video/i420_buffer.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/engine/scopedvideoencoder.h"
+#include "modules/video_coding/codecs/vp8/screenshare_layers.h"
+#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/clock.h"
+
+namespace {
+
+// qpMax sanity bounds: InitEncode resets a configured qpMax below
+// kDefaultMinQp to kDefaultMaxQp.
+const unsigned int kDefaultMinQp = 2;
+const unsigned int kDefaultMaxQp = 56;
+// Max qp for lowest spatial resolution when doing simulcast.
+const unsigned int kLowestResMaxQp = 45;
+
+// Adds up maxBitrate over the first |streams| simulcast streams of |codec|.
+uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
+  uint32_t total = 0;
+  int index = 0;
+  while (index < streams) {
+    total += codec.simulcastStream[index].maxBitrate;
+    ++index;
+  }
+  return total;
+}
+
+// Number of simulcast streams to actually encode for |codec|. Falls back to
+// a single stream when the configured count is unset (< 1) or when no stream
+// carries a nonzero max bitrate.
+int NumberOfStreams(const webrtc::VideoCodec& codec) {
+  const int configured = codec.numberOfSimulcastStreams;
+  int streams = (configured < 1) ? 1 : configured;
+  if (SumStreamMaxBitrate(streams, codec) == 0) {
+    streams = 1;
+  }
+  return streams;
+}
+
+// Validates the codec settings passed to InitEncode. Returns
+// WEBRTC_VIDEO_CODEC_OK or WEBRTC_VIDEO_CODEC_ERR_PARAMETER.
+int VerifyCodec(const webrtc::VideoCodec* inst) {
+  if (inst == nullptr) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->maxFramerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // allow zero to represent an unspecified maxBitRate
+  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->width <= 1 || inst->height <= 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Automatic resize is incompatible with multiple simulcast streams.
+  if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Orders simulcast streams by (height, width, maxBitrate), smallest first.
+// Used with std::minmax_element in InitEncode to find the lowest- and
+// highest-resolution streams.
+bool StreamResolutionCompare(const webrtc::SimulcastStream& a,
+                             const webrtc::SimulcastStream& b) {
+  return std::tie(a.height, a.width, a.maxBitrate) <
+         std::tie(b.height, b.width, b.maxBitrate);
+}
+
+// An EncodedImageCallback implementation that forwards on calls to a
+// SimulcastEncoderAdapter, but with the stream index it's registered with as
+// the first parameter to Encoded.
+// An EncodedImageCallback implementation that forwards on calls to a
+// SimulcastEncoderAdapter, but with the stream index it's registered with as
+// the first parameter to Encoded.
+class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
+ public:
+  AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter,
+                              size_t stream_idx)
+      : adapter_(adapter), stream_idx_(stream_idx) {}
+
+  // Tags the image with this callback's fixed stream index and hands it to
+  // the adapter.
+  EncodedImageCallback::Result OnEncodedImage(
+      const webrtc::EncodedImage& encoded_image,
+      const webrtc::CodecSpecificInfo* codec_specific_info,
+      const webrtc::RTPFragmentationHeader* fragmentation) override {
+    return adapter_->OnEncodedImage(stream_idx_, encoded_image,
+                                    codec_specific_info, fragmentation);
+  }
+
+ private:
+  webrtc::SimulcastEncoderAdapter* const adapter_;  // Not owned.
+  const size_t stream_idx_;
+};
+
+// Utility class used to adapt the simulcast id as reported by the temporal
+// layers factory, since each sub-encoder will report stream 0.
+// Utility class used to adapt the simulcast id as reported by the temporal
+// layers factory, since each sub-encoder will report stream 0. Both factory
+// methods ignore the |simulcast_id| the sub-encoder passes in and substitute
+// the fixed adapted id.
+class TemporalLayersFactoryAdapter : public webrtc::TemporalLayersFactory {
+ public:
+  TemporalLayersFactoryAdapter(int adapted_simulcast_id,
+                               const TemporalLayersFactory& tl_factory)
+      : adapted_simulcast_id_(adapted_simulcast_id), tl_factory_(tl_factory) {}
+  ~TemporalLayersFactoryAdapter() override {}
+  webrtc::TemporalLayers* Create(int simulcast_id, int temporal_layers,
+                                 uint8_t initial_tl0_pic_idx) const override {
+    return tl_factory_.Create(adapted_simulcast_id_, temporal_layers,
+                              initial_tl0_pic_idx);
+  }
+  std::unique_ptr<webrtc::TemporalLayersChecker> CreateChecker(
+      int simulcast_id, int temporal_layers,
+      uint8_t initial_tl0_pic_idx) const override {
+    return tl_factory_.CreateChecker(adapted_simulcast_id_, temporal_layers,
+                                     initial_tl0_pic_idx);
+  }
+
+  const int adapted_simulcast_id_;
+  const TemporalLayersFactory& tl_factory_;  // Not owned.
+};
+
+} // namespace
+
+namespace webrtc {
+
+// Constructs the adapter around a webrtc::VideoEncoderFactory.
+// NOTE(review): shares its initialization logic with the cricket-factory
+// overload below — consider delegating if that overload survives.
+SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory)
+    : inited_(0),
+      factory_(factory),
+      cricket_factory_(nullptr),
+      encoded_complete_callback_(nullptr),
+      implementation_name_("SimulcastEncoderAdapter") {
+  // The adapter is typically created on the worker thread, but operated on
+  // the encoder task queue.
+  encoder_queue_.Detach();
+
+  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
+}
+
+// Deprecated overload taking the legacy cricket factory (see header).
+SimulcastEncoderAdapter::SimulcastEncoderAdapter(
+    cricket::WebRtcVideoEncoderFactory* factory)
+    : inited_(0),
+      factory_(nullptr),
+      cricket_factory_(factory),
+      encoded_complete_callback_(nullptr),
+      implementation_name_("SimulcastEncoderAdapter") {
+  // The adapter is typically created on the worker thread, but operated on
+  // the encoder task queue.
+  encoder_queue_.Detach();
+
+  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
+}
+
+SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
+  // Callers must Release() before destruction.
+  RTC_DCHECK(!Initialized());
+  DestroyStoredEncoders();
+}
+
+// Releases all sub-encoders but keeps them in stored_encoders_ so a
+// subsequent InitEncode can reuse them. Always returns WEBRTC_VIDEO_CODEC_OK.
+int SimulcastEncoderAdapter::Release() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+
+  while (!streaminfos_.empty()) {
+    std::unique_ptr<VideoEncoder> encoder =
+        std::move(streaminfos_.back().encoder);
+    // Even though it seems very unlikely, there are no guarantees that the
+    // encoder will not call back after being Release()'d. Therefore, we first
+    // disable the callbacks here.
+    encoder->RegisterEncodeCompleteCallback(nullptr);
+    encoder->Release();
+    streaminfos_.pop_back();  // Deletes callback adapter.
+    stored_encoders_.push(std::move(encoder));
+  }
+
+  // It's legal to move the encoder to another queue now.
+  encoder_queue_.Detach();
+
+  // Mark uninitialized with release semantics so Initialized() observers see
+  // the teardown above.
+  rtc::AtomicOps::ReleaseStore(&inited_, 0);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Validates |inst|, tears down any previous configuration, then creates and
+// initializes one sub-encoder per simulcast stream (or a single pass-through
+// encoder when not doing simulcast). Returns WEBRTC_VIDEO_CODEC_OK or the
+// first sub-encoder/validation error.
+int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
+                                        int number_of_cores,
+                                        size_t max_payload_size) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+
+  if (number_of_cores < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  int ret = VerifyCodec(inst);
+  if (ret < 0) {
+    return ret;
+  }
+
+  // Release moves existing sub-encoders into stored_encoders_ for reuse.
+  ret = Release();
+  if (ret < 0) {
+    return ret;
+  }
+
+  int number_of_streams = NumberOfStreams(*inst);
+  RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams);
+  const bool doing_simulcast = (number_of_streams > 1);
+
+  // Compute per-stream start bitrates (kbps) from the overall start bitrate.
+  codec_ = *inst;
+  SimulcastRateAllocator rate_allocator(codec_, nullptr);
+  BitrateAllocation allocation = rate_allocator.GetAllocation(
+      codec_.startBitrate * 1000, codec_.maxFramerate);
+  std::vector<uint32_t> start_bitrates;
+  for (int i = 0; i < kMaxSimulcastStreams; ++i) {
+    uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
+    start_bitrates.push_back(stream_bitrate);
+  }
+
+  std::string implementation_name;
+  // Create |number_of_streams| encoder instances and init them.
+
+  // Locate the lowest- and highest-resolution streams so PopulateStreamCodec
+  // can apply resolution-specific settings.
+  const auto minmax = std::minmax_element(
+      std::begin(codec_.simulcastStream),
+      std::begin(codec_.simulcastStream) + number_of_streams,
+      StreamResolutionCompare);
+  const auto lowest_resolution_stream_index =
+      std::distance(std::begin(codec_.simulcastStream), minmax.first);
+  const auto highest_resolution_stream_index =
+      std::distance(std::begin(codec_.simulcastStream), minmax.second);
+
+  RTC_DCHECK_LT(lowest_resolution_stream_index, number_of_streams);
+  RTC_DCHECK_LT(highest_resolution_stream_index, number_of_streams);
+
+  for (int i = 0; i < number_of_streams; ++i) {
+    VideoCodec stream_codec;
+    uint32_t start_bitrate_kbps = start_bitrates[i];
+    if (!doing_simulcast) {
+      stream_codec = codec_;
+      stream_codec.numberOfSimulcastStreams = 1;
+    } else {
+      // Cap start bitrate to the min bitrate in order to avoid strange codec
+      // behavior. Since sending will be false, this should not matter.
+      StreamResolution stream_resolution =
+          i == highest_resolution_stream_index
+              ? StreamResolution::HIGHEST
+              : i == lowest_resolution_stream_index ? StreamResolution::LOWEST
+                                                    : StreamResolution::OTHER;
+
+      start_bitrate_kbps =
+          std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps);
+      PopulateStreamCodec(codec_, i, start_bitrate_kbps, stream_resolution,
+                          &stream_codec);
+    }
+    // Make the sub-encoder report |i| as its simulcast id instead of 0.
+    TemporalLayersFactoryAdapter tl_factory_adapter(i,
+                                                    *codec_.VP8()->tl_factory);
+    stream_codec.VP8()->tl_factory = &tl_factory_adapter;
+
+    // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl.
+    if (stream_codec.qpMax < kDefaultMinQp) {
+      stream_codec.qpMax = kDefaultMaxQp;
+    }
+
+    // If an existing encoder instance exists, reuse it.
+    // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here,
+    // when we start storing that state outside the encoder wrappers.
+    std::unique_ptr<VideoEncoder> encoder;
+    if (!stored_encoders_.empty()) {
+      encoder = std::move(stored_encoders_.top());
+      stored_encoders_.pop();
+    } else {
+      encoder = factory_ ? factory_->CreateVideoEncoder(SdpVideoFormat("VP8"))
+                         : CreateScopedVideoEncoder(cricket_factory_,
+                                                    cricket::VideoCodec("VP8"));
+    }
+
+    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
+    if (ret < 0) {
+      // Explicitly destroy the current encoder; because we haven't registered a
+      // StreamInfo for it yet, Release won't do anything about it.
+      encoder.reset();
+      Release();
+      return ret;
+    }
+    std::unique_ptr<EncodedImageCallback> callback(
+        new AdapterEncodedImageCallback(this, i));
+    encoder->RegisterEncodeCompleteCallback(callback.get());
+    // send_stream starts true only if this stream got a nonzero start bitrate.
+    streaminfos_.emplace_back(std::move(encoder), std::move(callback),
+                              stream_codec.width, stream_codec.height,
+                              start_bitrate_kbps > 0);
+
+    if (i != 0) {
+      implementation_name += ", ";
+    }
+    implementation_name += streaminfos_[i].encoder->ImplementationName();
+  }
+
+  if (doing_simulcast) {
+    implementation_name_ =
+        "SimulcastEncoderAdapter (" + implementation_name + ")";
+  } else {
+    implementation_name_ = implementation_name;
+  }
+
+  // To save memory, don't store encoders that we don't use.
+  DestroyStoredEncoders();
+
+  rtc::AtomicOps::ReleaseStore(&inited_, 1);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Encodes |input_image| on every active sub-encoder, downscaling with libyuv
+// when the sub-encoder's configured resolution differs from the source.
+// Any sub-encoder error aborts the remaining streams and is returned as-is.
+int SimulcastEncoderAdapter::Encode(
+    const VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info,
+    const std::vector<FrameType>* frame_types) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+
+  if (!Initialized()) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (encoded_complete_callback_ == nullptr) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // All active streams should generate a key frame if
+  // a key frame is requested by any stream.
+  bool send_key_frame = false;
+  if (frame_types) {
+    for (size_t i = 0; i < frame_types->size(); ++i) {
+      if (frame_types->at(i) == kVideoFrameKey) {
+        send_key_frame = true;
+        break;
+      }
+    }
+  }
+  // Also honor pending per-stream key frame requests (set when a stream is
+  // re-activated in SetRateAllocation).
+  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
+    if (streaminfos_[stream_idx].key_frame_request &&
+        streaminfos_[stream_idx].send_stream) {
+      send_key_frame = true;
+      break;
+    }
+  }
+
+  int src_width = input_image.width();
+  int src_height = input_image.height();
+  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
+    // Don't encode frames in resolutions that we don't intend to send.
+    if (!streaminfos_[stream_idx].send_stream) {
+      continue;
+    }
+
+    std::vector<FrameType> stream_frame_types;
+    if (send_key_frame) {
+      stream_frame_types.push_back(kVideoFrameKey);
+      streaminfos_[stream_idx].key_frame_request = false;
+    } else {
+      stream_frame_types.push_back(kVideoFrameDelta);
+    }
+
+    int dst_width = streaminfos_[stream_idx].width;
+    int dst_height = streaminfos_[stream_idx].height;
+    // If scaling isn't required, because the input resolution
+    // matches the destination or the input image is empty (e.g.
+    // a keyframe request for encoders with internal camera
+    // sources) or the source image has a native handle, pass the image on
+    // directly. Otherwise, we'll scale it to match what the encoder expects
+    // (below).
+    // For texture frames, the underlying encoder is expected to be able to
+    // correctly sample/scale the source texture.
+    // TODO(perkj): ensure that works going forward, and figure out how this
+    // affects webrtc:5683.
+    if ((dst_width == src_width && dst_height == src_height) ||
+        input_image.video_frame_buffer()->type() ==
+            VideoFrameBuffer::Type::kNative) {
+      int ret = streaminfos_[stream_idx].encoder->Encode(
+          input_image, codec_specific_info, &stream_frame_types);
+      if (ret != WEBRTC_VIDEO_CODEC_OK) {
+        return ret;
+      }
+    } else {
+      // Scale the source down to this stream's resolution into a fresh I420
+      // buffer before encoding.
+      rtc::scoped_refptr<I420Buffer> dst_buffer =
+          I420Buffer::Create(dst_width, dst_height);
+      rtc::scoped_refptr<I420BufferInterface> src_buffer =
+          input_image.video_frame_buffer()->ToI420();
+      libyuv::I420Scale(src_buffer->DataY(), src_buffer->StrideY(),
+                        src_buffer->DataU(), src_buffer->StrideU(),
+                        src_buffer->DataV(), src_buffer->StrideV(), src_width,
+                        src_height, dst_buffer->MutableDataY(),
+                        dst_buffer->StrideY(), dst_buffer->MutableDataU(),
+                        dst_buffer->StrideU(), dst_buffer->MutableDataV(),
+                        dst_buffer->StrideV(), dst_width, dst_height,
+                        libyuv::kFilterBilinear);
+
+      int ret = streaminfos_[stream_idx].encoder->Encode(
+          VideoFrame(dst_buffer, input_image.timestamp(),
+                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
+          codec_specific_info, &stream_frame_types);
+      if (ret != WEBRTC_VIDEO_CODEC_OK) {
+        return ret;
+      }
+    }
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the downstream callback that OnEncodedImage forwards to. The
+// callback is not owned by the adapter.
+int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+  encoded_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Fans the new channel state (packet loss, round-trip time) out to every
+// sub-encoder. Always reports success.
+int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss,
+                                                  int64_t rtt) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+  for (auto& stream_info : streaminfos_) {
+    stream_info.encoder->SetChannelParameters(packet_loss, rtt);
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Validates the new |bitrate| allocation against the configured codec limits,
+// updates which streams are active, and slices the allocation's temporal
+// layers into a per-stream allocation for each sub-encoder.
+int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate,
+                                               uint32_t new_framerate) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+
+  if (!Initialized()) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  if (new_framerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  if (bitrate.get_sum_bps() > 0) {
+    // Make sure the bitrate fits the configured min bitrates. 0 is a special
+    // value that means paused, though, so leave it alone.
+    if (bitrate.get_sum_kbps() < codec_.minBitrate) {
+      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+
+    if (codec_.numberOfSimulcastStreams > 0 &&
+        bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) {
+      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+  }
+
+  codec_.maxFramerate = new_framerate;
+
+  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
+    uint32_t stream_bitrate_kbps =
+        bitrate.GetSpatialLayerSum(stream_idx) / 1000;
+
+    // Need a key frame if we have not sent this stream before.
+    if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) {
+      streaminfos_[stream_idx].key_frame_request = true;
+    }
+    // A zero allocation pauses the stream (skipped in Encode).
+    streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0;
+
+    // Slice the temporal layers out of the full allocation and pass it on to
+    // the encoder handling the current simulcast stream.
+    BitrateAllocation stream_allocation;
+    for (int i = 0; i < kMaxTemporalStreams; ++i) {
+      if (bitrate.HasBitrate(stream_idx, i)) {
+        // Each sub-encoder sees itself as spatial layer 0.
+        stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i));
+      }
+    }
+    streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation,
+                                                        new_framerate);
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// TODO(brandtr): Add task checker to this member function, when all encoder
+// callbacks are coming in on the encoder queue.
+// TODO(brandtr): Add task checker to this member function, when all encoder
+// callbacks are coming in on the encoder queue.
+// Receives an encoded image from the sub-encoder for |stream_idx|, stamps the
+// VP8 codec-specific info with that simulcast index and the adapter's
+// implementation name, and forwards to the registered callback.
+EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
+    size_t stream_idx, const EncodedImage& encodedImage,
+    const CodecSpecificInfo* codecSpecificInfo,
+    const RTPFragmentationHeader* fragmentation) {
+  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
+  stream_codec_specific.codec_name = implementation_name_.c_str();
+  CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
+  vp8Info->simulcastIdx = stream_idx;
+
+  return encoded_complete_callback_->OnEncodedImage(
+      encodedImage, &stream_codec_specific, fragmentation);
+}
+
+// Derives the per-stream VideoCodec settings for simulcast stream
+// |stream_index| from the full codec config |inst|: copies the base settings,
+// overrides resolution/bitrates/qp from the stream's SimulcastStream entry,
+// then applies resolution-dependent tweaks.
+void SimulcastEncoderAdapter::PopulateStreamCodec(
+    const webrtc::VideoCodec& inst, int stream_index,
+    uint32_t start_bitrate_kbps, StreamResolution stream_resolution,
+    webrtc::VideoCodec* stream_codec) {
+  *stream_codec = inst;
+
+  // Stream specific settings.
+  stream_codec->VP8()->numberOfTemporalLayers =
+      inst.simulcastStream[stream_index].numberOfTemporalLayers;
+  // Sub-encoders are plain single-stream encoders.
+  stream_codec->numberOfSimulcastStreams = 0;
+  stream_codec->width = inst.simulcastStream[stream_index].width;
+  stream_codec->height = inst.simulcastStream[stream_index].height;
+  stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate;
+  stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate;
+  stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax;
+  // Settings that are based on stream/resolution.
+  if (stream_resolution == StreamResolution::LOWEST) {
+    // Settings for lowest spatial resolutions.
+    stream_codec->qpMax = kLowestResMaxQp;
+  }
+  if (stream_resolution != StreamResolution::HIGHEST) {
+    // For resolutions below CIF, set the codec |complexity| parameter to
+    // kComplexityHigher, which maps to cpu_used = -4.
+    int pixels_per_frame = stream_codec->width * stream_codec->height;
+    if (pixels_per_frame < 352 * 288) {
+      stream_codec->VP8()->complexity = webrtc::kComplexityHigher;
+    }
+    // Turn off denoising for all streams but the highest resolution.
+    stream_codec->VP8()->denoisingOn = false;
+  }
+  // TODO(ronghuawu): what to do with targetBitrate.
+
+  stream_codec->startBitrate = start_bitrate_kbps;
+}
+
+// True between a successful InitEncode and the next Release (acquire-load
+// pairs with the release-stores in those methods).
+bool SimulcastEncoderAdapter::Initialized() const {
+  return rtc::AtomicOps::AcquireLoad(&inited_) == 1;
+}
+
+// Frees the encoders cached between Release and InitEncode.
+void SimulcastEncoderAdapter::DestroyStoredEncoders() {
+  while (!stored_encoders_.empty()) {
+    stored_encoders_.pop();
+  }
+}
+
+// Native-handle input is supported only if every sub-encoder supports it.
+bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+  // We should not be calling this method before streaminfos_ are configured.
+  RTC_DCHECK(!streaminfos_.empty());
+  return std::all_of(streaminfos_.begin(), streaminfos_.end(),
+                     [](const StreamInfo& info) {
+                       return info.encoder->SupportsNativeHandle();
+                     });
+}
+
+// Quality scaling is delegated to the single sub-encoder when not doing
+// simulcast; with multiple streams (or before init) it is disabled.
+VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings()
+    const {
+  // TODO(brandtr): Investigate why the sequence checker below fails on mac.
+  // RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+  // Turn off quality scaling for simulcast.
+  if (!Initialized() || NumberOfStreams(codec_) != 1) {
+    return VideoEncoder::ScalingSettings(false);
+  }
+  return streaminfos_[0].encoder->GetScalingSettings();
+}
+
+// Name composed by InitEncode from the sub-encoders' implementation names.
+const char* SimulcastEncoderAdapter::ImplementationName() const {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
+  return implementation_name_.c_str();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.h b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.h
new file mode 100644
index 0000000000..799e8fb761
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_
+#define MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_
+
+#include <memory>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/atomicops.h"
+#include "rtc_base/sequenced_task_checker.h"
+
+namespace webrtc {
+
+class SimulcastRateAllocator;
+class VideoEncoderFactory;
+
+// SimulcastEncoderAdapter implements simulcast support by creating multiple
+// webrtc::VideoEncoder instances with the given VideoEncoderFactory.
+// The object is created and destroyed on the worker thread, but all public
+// interfaces should be called from the encoder task queue.
+class SimulcastEncoderAdapter : public VP8Encoder {
+ public:
+  explicit SimulcastEncoderAdapter(VideoEncoderFactory* factory);
+  // Deprecated - use webrtc::VideoEncoderFactory instead.
+  explicit SimulcastEncoderAdapter(cricket::WebRtcVideoEncoderFactory* factory);
+  virtual ~SimulcastEncoderAdapter();
+
+  // Implements VideoEncoder.
+  int Release() override;
+  int InitEncode(const VideoCodec* inst, int number_of_cores,
+                 size_t max_payload_size) override;
+  int Encode(const VideoFrame& input_image,
+             const CodecSpecificInfo* codec_specific_info,
+             const std::vector<FrameType>* frame_types) override;
+  int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
+  int SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
+  int SetRateAllocation(const BitrateAllocation& bitrate,
+                        uint32_t new_framerate) override;
+
+  // Eventual handler for the contained encoders' EncodedImageCallbacks, but
+  // called from an internal helper that also knows the correct stream
+  // index.
+  EncodedImageCallback::Result OnEncodedImage(
+      size_t stream_idx, const EncodedImage& encoded_image,
+      const CodecSpecificInfo* codec_specific_info,
+      const RTPFragmentationHeader* fragmentation);
+
+  VideoEncoder::ScalingSettings GetScalingSettings() const override;
+
+  bool SupportsNativeHandle() const override;
+  const char* ImplementationName() const override;
+
+ private:
+  // Per-sub-encoder bookkeeping: the encoder, its stream-index-tagging
+  // callback adapter, its configured resolution, and its send state.
+  struct StreamInfo {
+    StreamInfo(std::unique_ptr<VideoEncoder> encoder,
+               std::unique_ptr<EncodedImageCallback> callback, uint16_t width,
+               uint16_t height, bool send_stream)
+        : encoder(std::move(encoder)),
+          callback(std::move(callback)),
+          width(width),
+          height(height),
+          key_frame_request(false),
+          send_stream(send_stream) {}
+    std::unique_ptr<VideoEncoder> encoder;
+    std::unique_ptr<EncodedImageCallback> callback;
+    uint16_t width;
+    uint16_t height;
+    // Set when the stream is (re)activated; cleared once a key frame is sent.
+    bool key_frame_request;
+    // False while the stream's bitrate allocation is zero (paused).
+    bool send_stream;
+  };
+
+  enum class StreamResolution {
+    OTHER,
+    HIGHEST,
+    LOWEST,
+  };
+
+  // Populate the codec settings for each simulcast stream.
+  static void PopulateStreamCodec(const webrtc::VideoCodec& inst,
+                                  int stream_index, uint32_t start_bitrate_kbps,
+                                  StreamResolution stream_resolution,
+                                  webrtc::VideoCodec* stream_codec);
+
+  bool Initialized() const;
+
+  void DestroyStoredEncoders();
+
+  volatile int inited_;  // Accessed atomically.
+  VideoEncoderFactory* const factory_;
+  cricket::WebRtcVideoEncoderFactory* const cricket_factory_;
+  VideoCodec codec_;
+  std::vector<StreamInfo> streaminfos_;
+  EncodedImageCallback* encoded_complete_callback_;
+  std::string implementation_name_;
+
+  // Used for checking the single-threaded access of the encoder interface.
+  rtc::SequencedTaskChecker encoder_queue_;
+
+  // Store encoders in between calls to Release and InitEncode, so they don't
+  // have to be recreated. Remaining encoders are destroyed by the destructor.
+  std::stack<std::unique_ptr<VideoEncoder>> stored_encoders_;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc
new file mode 100644
index 0000000000..8087d7be0b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -0,0 +1,831 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <array>
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "media/engine/internalencoderfactory.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "modules/video_coding/codecs/vp8/simulcast_test_utility.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "rtc_base/ptr_util.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+namespace testing {
+
+class TestSimulcastEncoderAdapter : public TestVp8Simulcast {
+ public:
+ TestSimulcastEncoderAdapter() : factory_(new InternalEncoderFactory()) {}
+
+ protected:
+ std::unique_ptr<VP8Encoder> CreateEncoder() override {
+ return rtc::MakeUnique<SimulcastEncoderAdapter>(factory_.get());
+ }
+ std::unique_ptr<VP8Decoder> CreateDecoder() override {
+ return VP8Decoder::Create();
+ }
+
+ private:
+ std::unique_ptr<VideoEncoderFactory> factory_;
+};
+
+TEST_F(TestSimulcastEncoderAdapter, TestKeyFrameRequestsOnAllStreams) {
+ TestVp8Simulcast::TestKeyFrameRequestsOnAllStreams();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestPaddingAllStreams) {
+ TestVp8Simulcast::TestPaddingAllStreams();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestPaddingTwoStreams) {
+ TestVp8Simulcast::TestPaddingTwoStreams();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestPaddingTwoStreamsOneMaxedOut) {
+ TestVp8Simulcast::TestPaddingTwoStreamsOneMaxedOut();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestPaddingOneStream) {
+ TestVp8Simulcast::TestPaddingOneStream();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestPaddingOneStreamTwoMaxedOut) {
+ TestVp8Simulcast::TestPaddingOneStreamTwoMaxedOut();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestSendAllStreams) {
+ TestVp8Simulcast::TestSendAllStreams();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestDisablingStreams) {
+ TestVp8Simulcast::TestDisablingStreams();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestSwitchingToOneStream) {
+ TestVp8Simulcast::TestSwitchingToOneStream();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestSwitchingToOneOddStream) {
+ TestVp8Simulcast::TestSwitchingToOneOddStream();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestStrideEncodeDecode) {
+ TestVp8Simulcast::TestStrideEncodeDecode();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestSaptioTemporalLayers333PatternEncoder) {
+ TestVp8Simulcast::TestSaptioTemporalLayers333PatternEncoder();
+}
+
+TEST_F(TestSimulcastEncoderAdapter, TestSpatioTemporalLayers321PatternEncoder) {
+ TestVp8Simulcast::TestSpatioTemporalLayers321PatternEncoder();
+}
+
+class MockVideoEncoder;
+
+class MockVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+
+ CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override;
+
+ const std::vector<MockVideoEncoder*>& encoders() const;
+ void SetEncoderNames(const std::vector<const char*>& encoder_names);
+ void set_init_encode_return_value(int32_t value);
+
+ void DestroyVideoEncoder(VideoEncoder* encoder);
+
+ private:
+ int32_t init_encode_return_value_ = 0;
+ std::vector<MockVideoEncoder*> encoders_;
+ std::vector<const char*> encoder_names_;
+};
+
+class MockVideoEncoder : public VideoEncoder {
+ public:
+ explicit MockVideoEncoder(MockVideoEncoderFactory* factory)
+ : factory_(factory) {}
+
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ int32_t InitEncode(const VideoCodec* codecSettings,
+ int32_t numberOfCores,
+ size_t maxPayloadSize) /* override */ {
+ codec_ = *codecSettings;
+ return init_encode_return_value_;
+ }
+
+ MOCK_METHOD3(
+ Encode,
+ int32_t(const VideoFrame& inputImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<FrameType>* frame_types) /* override */);
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) /* override */ {
+ callback_ = callback;
+ return 0;
+ }
+
+ MOCK_METHOD0(Release, int32_t());
+
+ int32_t SetRateAllocation(const BitrateAllocation& bitrate_allocation,
+ uint32_t framerate) {
+ last_set_bitrate_ = bitrate_allocation;
+ return 0;
+ }
+
+ MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
+
+ bool SupportsNativeHandle() const /* override */ {
+ return supports_native_handle_;
+ }
+
+ virtual ~MockVideoEncoder() { factory_->DestroyVideoEncoder(this); }
+
+ const VideoCodec& codec() const { return codec_; }
+
+ void SendEncodedImage(int width, int height) {
+ // Sends a fake image of the given width/height.
+ EncodedImage image;
+ image._encodedWidth = width;
+ image._encodedHeight = height;
+ CodecSpecificInfo codec_specific_info;
+ memset(&codec_specific_info, 0, sizeof(codec_specific_info));
+ callback_->OnEncodedImage(image, &codec_specific_info, nullptr);
+ }
+
+ void set_supports_native_handle(bool enabled) {
+ supports_native_handle_ = enabled;
+ }
+
+ void set_init_encode_return_value(int32_t value) {
+ init_encode_return_value_ = value;
+ }
+
+ BitrateAllocation last_set_bitrate() const { return last_set_bitrate_; }
+
+ MOCK_CONST_METHOD0(ImplementationName, const char*());
+
+ private:
+ MockVideoEncoderFactory* const factory_;
+ bool supports_native_handle_ = false;
+ int32_t init_encode_return_value_ = 0;
+ BitrateAllocation last_set_bitrate_;
+
+ VideoCodec codec_;
+ EncodedImageCallback* callback_;
+};
+
+std::vector<SdpVideoFormat> MockVideoEncoderFactory::GetSupportedFormats()
+ const {
+ std::vector<SdpVideoFormat> formats = {SdpVideoFormat("VP8")};
+ return formats;
+}
+
+std::unique_ptr<VideoEncoder> MockVideoEncoderFactory::CreateVideoEncoder(
+ const SdpVideoFormat& format) {
+ std::unique_ptr<MockVideoEncoder> encoder(
+ new ::testing::NiceMock<MockVideoEncoder>(this));
+ encoder->set_init_encode_return_value(init_encode_return_value_);
+ const char* encoder_name = encoder_names_.empty()
+ ? "codec_implementation_name"
+ : encoder_names_[encoders_.size()];
+ ON_CALL(*encoder, ImplementationName()).WillByDefault(Return(encoder_name));
+ encoders_.push_back(encoder.get());
+ return encoder;
+}
+
+void MockVideoEncoderFactory::DestroyVideoEncoder(VideoEncoder* encoder) {
+ for (size_t i = 0; i < encoders_.size(); ++i) {
+ if (encoders_[i] == encoder) {
+ encoders_.erase(encoders_.begin() + i);
+ break;
+ }
+ }
+}
+
+VideoEncoderFactory::CodecInfo MockVideoEncoderFactory::QueryVideoEncoder(
+ const SdpVideoFormat& format) const {
+ return CodecInfo();
+}
+
+const std::vector<MockVideoEncoder*>& MockVideoEncoderFactory::encoders()
+ const {
+ return encoders_;
+}
+void MockVideoEncoderFactory::SetEncoderNames(
+ const std::vector<const char*>& encoder_names) {
+ encoder_names_ = encoder_names;
+}
+void MockVideoEncoderFactory::set_init_encode_return_value(int32_t value) {
+ init_encode_return_value_ = value;
+}
+
+class TestSimulcastEncoderAdapterFakeHelper {
+ public:
+ TestSimulcastEncoderAdapterFakeHelper()
+ : factory_(new MockVideoEncoderFactory()) {}
+
+  // Can only be called once as the SimulcastEncoderAdapter will take
+  // ownership of |factory_|.
+ VP8Encoder* CreateMockEncoderAdapter() {
+ return new SimulcastEncoderAdapter(factory_.get());
+ }
+
+ void ExpectCallSetChannelParameters(uint32_t packetLoss, int64_t rtt) {
+ EXPECT_TRUE(!factory_->encoders().empty());
+ for (size_t i = 0; i < factory_->encoders().size(); ++i) {
+ EXPECT_CALL(*factory_->encoders()[i],
+ SetChannelParameters(packetLoss, rtt))
+ .Times(1);
+ }
+ }
+
+ MockVideoEncoderFactory* factory() { return factory_.get(); }
+
+ private:
+ std::unique_ptr<MockVideoEncoderFactory> factory_;
+};
+
+static const int kTestTemporalLayerProfile[3] = {3, 2, 1};
+
+class TestSimulcastEncoderAdapterFake : public ::testing::Test,
+ public EncodedImageCallback {
+ public:
+ TestSimulcastEncoderAdapterFake()
+ : helper_(new TestSimulcastEncoderAdapterFakeHelper()),
+ adapter_(helper_->CreateMockEncoderAdapter()),
+ last_encoded_image_width_(-1),
+ last_encoded_image_height_(-1),
+ last_encoded_image_simulcast_index_(-1) {}
+ virtual ~TestSimulcastEncoderAdapterFake() {
+ if (adapter_) {
+ adapter_->Release();
+ }
+ }
+
+ Result OnEncodedImage(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override {
+ last_encoded_image_width_ = encoded_image._encodedWidth;
+ last_encoded_image_height_ = encoded_image._encodedHeight;
+ if (codec_specific_info) {
+ last_encoded_image_simulcast_index_ =
+ codec_specific_info->codecSpecific.VP8.simulcastIdx;
+ }
+ return Result(Result::OK, encoded_image._timeStamp);
+ }
+
+ bool GetLastEncodedImageInfo(int* out_width,
+ int* out_height,
+ int* out_simulcast_index) {
+ if (last_encoded_image_width_ == -1) {
+ return false;
+ }
+ *out_width = last_encoded_image_width_;
+ *out_height = last_encoded_image_height_;
+ *out_simulcast_index = last_encoded_image_simulcast_index_;
+ return true;
+ }
+
+ void SetupCodec() {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_, nullptr));
+ tl_factory_.SetListener(rate_allocator_.get());
+ codec_.VP8()->tl_factory = &tl_factory_;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ }
+
+ void VerifyCodec(const VideoCodec& ref, int stream_index) {
+ const VideoCodec& target =
+ helper_->factory()->encoders()[stream_index]->codec();
+ EXPECT_EQ(ref.codecType, target.codecType);
+ EXPECT_EQ(0, strcmp(ref.plName, target.plName));
+ EXPECT_EQ(ref.plType, target.plType);
+ EXPECT_EQ(ref.width, target.width);
+ EXPECT_EQ(ref.height, target.height);
+ EXPECT_EQ(ref.startBitrate, target.startBitrate);
+ EXPECT_EQ(ref.maxBitrate, target.maxBitrate);
+ EXPECT_EQ(ref.minBitrate, target.minBitrate);
+ EXPECT_EQ(ref.maxFramerate, target.maxFramerate);
+ EXPECT_EQ(ref.VP8().pictureLossIndicationOn,
+ target.VP8().pictureLossIndicationOn);
+ EXPECT_EQ(ref.VP8().complexity, target.VP8().complexity);
+ EXPECT_EQ(ref.VP8().resilience, target.VP8().resilience);
+ EXPECT_EQ(ref.VP8().numberOfTemporalLayers,
+ target.VP8().numberOfTemporalLayers);
+ EXPECT_EQ(ref.VP8().denoisingOn, target.VP8().denoisingOn);
+ EXPECT_EQ(ref.VP8().errorConcealmentOn, target.VP8().errorConcealmentOn);
+ EXPECT_EQ(ref.VP8().automaticResizeOn, target.VP8().automaticResizeOn);
+ EXPECT_EQ(ref.VP8().frameDroppingOn, target.VP8().frameDroppingOn);
+ EXPECT_EQ(ref.VP8().keyFrameInterval, target.VP8().keyFrameInterval);
+ EXPECT_EQ(ref.qpMax, target.qpMax);
+ EXPECT_EQ(0, target.numberOfSimulcastStreams);
+ EXPECT_EQ(ref.mode, target.mode);
+
+ // No need to compare simulcastStream as numberOfSimulcastStreams should
+ // always be 0.
+ }
+
+ void InitRefCodec(int stream_index, VideoCodec* ref_codec) {
+ *ref_codec = codec_;
+ ref_codec->VP8()->numberOfTemporalLayers =
+ kTestTemporalLayerProfile[stream_index];
+ ref_codec->VP8()->tl_factory = &tl_factory_;
+ ref_codec->width = codec_.simulcastStream[stream_index].width;
+ ref_codec->height = codec_.simulcastStream[stream_index].height;
+ ref_codec->maxBitrate = codec_.simulcastStream[stream_index].maxBitrate;
+ ref_codec->minBitrate = codec_.simulcastStream[stream_index].minBitrate;
+ ref_codec->qpMax = codec_.simulcastStream[stream_index].qpMax;
+ }
+
+ void VerifyCodecSettings() {
+ EXPECT_EQ(3u, helper_->factory()->encoders().size());
+ VideoCodec ref_codec;
+
+ // stream 0, the lowest resolution stream.
+ InitRefCodec(0, &ref_codec);
+ ref_codec.qpMax = 45;
+ ref_codec.VP8()->complexity = webrtc::kComplexityHigher;
+ ref_codec.VP8()->denoisingOn = false;
+    ref_codec.startBitrate = 100;  // Should equal the target bitrate.
+ VerifyCodec(ref_codec, 0);
+
+ // stream 1
+ InitRefCodec(1, &ref_codec);
+ ref_codec.VP8()->denoisingOn = false;
+ // The start bitrate (300kbit) minus what we have for the lower layers
+ // (100kbit).
+ ref_codec.startBitrate = 200;
+ VerifyCodec(ref_codec, 1);
+
+ // stream 2, the biggest resolution stream.
+ InitRefCodec(2, &ref_codec);
+ // We don't have enough bits to send this, so the adapter should have
+ // configured it to use the min bitrate for this layer (600kbit) but turn
+ // off sending.
+ ref_codec.startBitrate = 600;
+ VerifyCodec(ref_codec, 2);
+ }
+
+ protected:
+ std::unique_ptr<TestSimulcastEncoderAdapterFakeHelper> helper_;
+ std::unique_ptr<VP8Encoder> adapter_;
+ VideoCodec codec_;
+ int last_encoded_image_width_;
+ int last_encoded_image_height_;
+ int last_encoded_image_simulcast_index_;
+ TemporalLayersFactory tl_factory_;
+ std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
+};
+
+TEST_F(TestSimulcastEncoderAdapterFake, InitEncode) {
+ SetupCodec();
+ VerifyCodecSettings();
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, ReleaseWithoutInitEncode) {
+ EXPECT_EQ(0, adapter_->Release());
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, Reinit) {
+ SetupCodec();
+ EXPECT_EQ(0, adapter_->Release());
+
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, SetChannelParameters) {
+ SetupCodec();
+ const uint32_t packetLoss = 5;
+ const int64_t rtt = 30;
+ helper_->ExpectCallSetChannelParameters(packetLoss, rtt);
+ adapter_->SetChannelParameters(packetLoss, rtt);
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, EncodedCallbackForDifferentEncoders) {
+ SetupCodec();
+
+ // Set bitrates so that we send all layers.
+ adapter_->SetRateAllocation(rate_allocator_->GetAllocation(1200, 30), 30);
+
+ // At this point, the simulcast encoder adapter should have 3 streams: HD,
+ // quarter HD, and quarter quarter HD. We're going to mostly ignore the exact
+ // resolutions, to test that the adapter forwards on the correct resolution
+ // and simulcast index values, going only off the encoder that generates the
+ // image.
+ std::vector<MockVideoEncoder*> encoders = helper_->factory()->encoders();
+ ASSERT_EQ(3u, encoders.size());
+ encoders[0]->SendEncodedImage(1152, 704);
+ int width;
+ int height;
+ int simulcast_index;
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(1152, width);
+ EXPECT_EQ(704, height);
+ EXPECT_EQ(0, simulcast_index);
+
+ encoders[1]->SendEncodedImage(300, 620);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(300, width);
+ EXPECT_EQ(620, height);
+ EXPECT_EQ(1, simulcast_index);
+
+ encoders[2]->SendEncodedImage(120, 240);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(120, width);
+ EXPECT_EQ(240, height);
+ EXPECT_EQ(2, simulcast_index);
+}
+
+// This test verifies that the underlying encoders are reused, when the adapter
+// is reinited with different number of simulcast streams. It further checks
+// that the allocated encoders are reused in the same order as before, starting
+// with the lowest stream.
+TEST_F(TestSimulcastEncoderAdapterFake, ReusesEncodersInOrder) {
+ // Set up common settings for three streams.
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_, nullptr));
+ tl_factory_.SetListener(rate_allocator_.get());
+ codec_.VP8()->tl_factory = &tl_factory_;
+ adapter_->RegisterEncodeCompleteCallback(this);
+
+ // Input data.
+ rtc::scoped_refptr<VideoFrameBuffer> buffer(I420Buffer::Create(1280, 720));
+ VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+ std::vector<FrameType> frame_types;
+
+ // Encode with three streams.
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ VerifyCodecSettings();
+ std::vector<MockVideoEncoder*> original_encoders =
+ helper_->factory()->encoders();
+ ASSERT_EQ(3u, original_encoders.size());
+ EXPECT_CALL(*original_encoders[0], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*original_encoders[1], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*original_encoders[2], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ frame_types.resize(3, kVideoFrameKey);
+ EXPECT_EQ(0, adapter_->Encode(input_frame, nullptr, &frame_types));
+ EXPECT_CALL(*original_encoders[0], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*original_encoders[1], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*original_encoders[2], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_EQ(0, adapter_->Release());
+
+ // Encode with two streams.
+ codec_.width /= 2;
+ codec_.height /= 2;
+ codec_.numberOfSimulcastStreams = 2;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ std::vector<MockVideoEncoder*> new_encoders = helper_->factory()->encoders();
+ ASSERT_EQ(2u, new_encoders.size());
+ ASSERT_EQ(original_encoders[0], new_encoders[0]);
+ EXPECT_CALL(*original_encoders[0], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ ASSERT_EQ(original_encoders[1], new_encoders[1]);
+ EXPECT_CALL(*original_encoders[1], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ frame_types.resize(2, kVideoFrameKey);
+ EXPECT_EQ(0, adapter_->Encode(input_frame, nullptr, &frame_types));
+ EXPECT_CALL(*original_encoders[0], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*original_encoders[1], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_EQ(0, adapter_->Release());
+
+ // Encode with single stream.
+ codec_.width /= 2;
+ codec_.height /= 2;
+ codec_.numberOfSimulcastStreams = 1;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ new_encoders = helper_->factory()->encoders();
+ ASSERT_EQ(1u, new_encoders.size());
+ ASSERT_EQ(original_encoders[0], new_encoders[0]);
+ EXPECT_CALL(*original_encoders[0], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ frame_types.resize(1, kVideoFrameKey);
+ EXPECT_EQ(0, adapter_->Encode(input_frame, nullptr, &frame_types));
+ EXPECT_CALL(*original_encoders[0], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_EQ(0, adapter_->Release());
+
+ // Encode with three streams, again.
+ codec_.width *= 4;
+ codec_.height *= 4;
+ codec_.numberOfSimulcastStreams = 3;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ new_encoders = helper_->factory()->encoders();
+ ASSERT_EQ(3u, new_encoders.size());
+ // The first encoder is reused.
+ ASSERT_EQ(original_encoders[0], new_encoders[0]);
+ EXPECT_CALL(*original_encoders[0], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ // The second and third encoders are new.
+ EXPECT_CALL(*new_encoders[1], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*new_encoders[2], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ frame_types.resize(3, kVideoFrameKey);
+ EXPECT_EQ(0, adapter_->Encode(input_frame, nullptr, &frame_types));
+ EXPECT_CALL(*original_encoders[0], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*new_encoders[1], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_CALL(*new_encoders[2], Release())
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+ EXPECT_EQ(0, adapter_->Release());
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, DoesNotLeakEncoders) {
+ SetupCodec();
+ VerifyCodecSettings();
+
+ EXPECT_EQ(3u, helper_->factory()->encoders().size());
+
+ // The adapter should destroy all encoders it has allocated. Since
+ // |helper_->factory()| is owned by |adapter_|, however, we need to rely on
+ // lsan to find leaks here.
+ EXPECT_EQ(0, adapter_->Release());
+ adapter_.reset();
+}
+
+// This test verifies that an adapter reinit with the same codec settings as
+// before does not change the underlying encoder codec settings.
+TEST_F(TestSimulcastEncoderAdapterFake, ReinitDoesNotReorderEncoderSettings) {
+ SetupCodec();
+ VerifyCodecSettings();
+
+ // Capture current codec settings.
+ std::vector<MockVideoEncoder*> encoders = helper_->factory()->encoders();
+ ASSERT_EQ(3u, encoders.size());
+ std::array<VideoCodec, 3> codecs_before;
+ for (int i = 0; i < 3; ++i) {
+ codecs_before[i] = encoders[i]->codec();
+ }
+
+ // Reinitialize and verify that the new codec settings are the same.
+ EXPECT_EQ(0, adapter_->Release());
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ for (int i = 0; i < 3; ++i) {
+ const VideoCodec& codec_before = codecs_before[i];
+ const VideoCodec& codec_after = encoders[i]->codec();
+
+ // webrtc::VideoCodec does not implement operator==.
+ EXPECT_EQ(codec_before.codecType, codec_after.codecType);
+ EXPECT_EQ(codec_before.plType, codec_after.plType);
+ EXPECT_EQ(codec_before.width, codec_after.width);
+ EXPECT_EQ(codec_before.height, codec_after.height);
+ EXPECT_EQ(codec_before.startBitrate, codec_after.startBitrate);
+ EXPECT_EQ(codec_before.maxBitrate, codec_after.maxBitrate);
+ EXPECT_EQ(codec_before.minBitrate, codec_after.minBitrate);
+ EXPECT_EQ(codec_before.targetBitrate, codec_after.targetBitrate);
+ EXPECT_EQ(codec_before.maxFramerate, codec_after.maxFramerate);
+ EXPECT_EQ(codec_before.qpMax, codec_after.qpMax);
+ EXPECT_EQ(codec_before.numberOfSimulcastStreams,
+ codec_after.numberOfSimulcastStreams);
+ EXPECT_EQ(codec_before.mode, codec_after.mode);
+ EXPECT_EQ(codec_before.expect_encode_from_texture,
+ codec_after.expect_encode_from_texture);
+ }
+}
+
+// This test is similar to the one above, except that it tests the simulcastIdx
+// from the CodecSpecificInfo that is connected to an encoded frame. The
+// PayloadRouter demuxes the incoming encoded frames on different RTP modules
+// using the simulcastIdx, so it's important that there is no corresponding
+// encoder reordering in between adapter reinits as this would lead to PictureID
+// discontinuities.
+TEST_F(TestSimulcastEncoderAdapterFake, ReinitDoesNotReorderFrameSimulcastIdx) {
+ SetupCodec();
+ adapter_->SetRateAllocation(rate_allocator_->GetAllocation(1200, 30), 30);
+ VerifyCodecSettings();
+
+ // Send frames on all streams.
+ std::vector<MockVideoEncoder*> encoders = helper_->factory()->encoders();
+ ASSERT_EQ(3u, encoders.size());
+ encoders[0]->SendEncodedImage(1152, 704);
+ int width;
+ int height;
+ int simulcast_index;
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(0, simulcast_index);
+
+ encoders[1]->SendEncodedImage(300, 620);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(1, simulcast_index);
+
+ encoders[2]->SendEncodedImage(120, 240);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(2, simulcast_index);
+
+ // Reinitialize.
+ EXPECT_EQ(0, adapter_->Release());
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->SetRateAllocation(rate_allocator_->GetAllocation(1200, 30), 30);
+
+ // Verify that the same encoder sends out frames on the same simulcast index.
+ encoders[0]->SendEncodedImage(1152, 704);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(0, simulcast_index);
+
+ encoders[1]->SendEncodedImage(300, 620);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(1, simulcast_index);
+
+ encoders[2]->SendEncodedImage(120, 240);
+ EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index));
+ EXPECT_EQ(2, simulcast_index);
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, SupportsNativeHandleForSingleStreams) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.numberOfSimulcastStreams = 1;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ ASSERT_EQ(1u, helper_->factory()->encoders().size());
+ helper_->factory()->encoders()[0]->set_supports_native_handle(true);
+ EXPECT_TRUE(adapter_->SupportsNativeHandle());
+ helper_->factory()->encoders()[0]->set_supports_native_handle(false);
+ EXPECT_FALSE(adapter_->SupportsNativeHandle());
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, SetRatesUnderMinBitrate) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.minBitrate = 50;
+ codec_.numberOfSimulcastStreams = 1;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_, nullptr));
+
+  // A rate at or above the configured min bitrate should be passed through.
+ BitrateAllocation target_bitrate =
+ rate_allocator_->GetAllocation(codec_.minBitrate * 1000, 30);
+ adapter_->SetRateAllocation(target_bitrate, 30);
+ EXPECT_EQ(target_bitrate,
+ helper_->factory()->encoders()[0]->last_set_bitrate());
+
+ // Below min but non-zero should be replaced with the min bitrate.
+ BitrateAllocation too_low_bitrate =
+ rate_allocator_->GetAllocation((codec_.minBitrate - 1) * 1000, 30);
+ adapter_->SetRateAllocation(too_low_bitrate, 30);
+ EXPECT_EQ(target_bitrate,
+ helper_->factory()->encoders()[0]->last_set_bitrate());
+
+ // Zero should be passed on as is, since it means "pause".
+ adapter_->SetRateAllocation(BitrateAllocation(), 30);
+ EXPECT_EQ(BitrateAllocation(),
+ helper_->factory()->encoders()[0]->last_set_bitrate());
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, SupportsImplementationName) {
+ EXPECT_STREQ("SimulcastEncoderAdapter", adapter_->ImplementationName());
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ std::vector<const char*> encoder_names;
+ encoder_names.push_back("codec1");
+ encoder_names.push_back("codec2");
+ encoder_names.push_back("codec3");
+ helper_->factory()->SetEncoderNames(encoder_names);
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ EXPECT_STREQ("SimulcastEncoderAdapter (codec1, codec2, codec3)",
+ adapter_->ImplementationName());
+
+ // Single streams should not expose "SimulcastEncoderAdapter" in name.
+ EXPECT_EQ(0, adapter_->Release());
+ codec_.numberOfSimulcastStreams = 1;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ ASSERT_EQ(1u, helper_->factory()->encoders().size());
+ EXPECT_STREQ("codec1", adapter_->ImplementationName());
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake,
+ SupportsNativeHandleForMultipleStreams) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.numberOfSimulcastStreams = 3;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ ASSERT_EQ(3u, helper_->factory()->encoders().size());
+ for (MockVideoEncoder* encoder : helper_->factory()->encoders())
+ encoder->set_supports_native_handle(true);
+ // If one encoder doesn't support it, then overall support is disabled.
+ helper_->factory()->encoders()[0]->set_supports_native_handle(false);
+ EXPECT_FALSE(adapter_->SupportsNativeHandle());
+ // Once all do, then the adapter claims support.
+ helper_->factory()->encoders()[0]->set_supports_native_handle(true);
+ EXPECT_TRUE(adapter_->SupportsNativeHandle());
+}
+
+// TODO(nisse): Reuse definition in webrtc/test/fake_texture_handle.h.
+class FakeNativeBuffer : public VideoFrameBuffer {
+ public:
+ FakeNativeBuffer(int width, int height) : width_(width), height_(height) {}
+
+ Type type() const override { return Type::kNative; }
+ int width() const override { return width_; }
+ int height() const override { return height_; }
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override {
+ RTC_NOTREACHED();
+ return nullptr;
+ }
+
+ private:
+ const int width_;
+ const int height_;
+};
+
+TEST_F(TestSimulcastEncoderAdapterFake,
+ NativeHandleForwardingForMultipleStreams) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.numberOfSimulcastStreams = 3;
+ // High start bitrate, so all streams are enabled.
+ codec_.startBitrate = 3000;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ ASSERT_EQ(3u, helper_->factory()->encoders().size());
+ for (MockVideoEncoder* encoder : helper_->factory()->encoders())
+ encoder->set_supports_native_handle(true);
+ EXPECT_TRUE(adapter_->SupportsNativeHandle());
+
+ rtc::scoped_refptr<VideoFrameBuffer> buffer(
+ new rtc::RefCountedObject<FakeNativeBuffer>(1280, 720));
+ VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+ // Expect calls with the given video frame verbatim, since it's a texture
+ // frame and can't otherwise be modified/resized.
+ for (MockVideoEncoder* encoder : helper_->factory()->encoders())
+ EXPECT_CALL(*encoder, Encode(::testing::Ref(input_frame), _, _)).Times(1);
+ std::vector<FrameType> frame_types(3, kVideoFrameKey);
+ EXPECT_EQ(0, adapter_->Encode(input_frame, nullptr, &frame_types));
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.numberOfSimulcastStreams = 3;
+ EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
+ adapter_->RegisterEncodeCompleteCallback(this);
+ ASSERT_EQ(3u, helper_->factory()->encoders().size());
+ // Tell the 2nd encoder to request software fallback.
+ EXPECT_CALL(*helper_->factory()->encoders()[1], Encode(_, _, _))
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
+
+ // Send a fake frame and assert the return is software fallback.
+ rtc::scoped_refptr<I420Buffer> input_buffer =
+ I420Buffer::Create(kDefaultWidth, kDefaultHeight);
+ input_buffer->InitializeData();
+ VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
+ std::vector<FrameType> frame_types(3, kVideoFrameKey);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
+ adapter_->Encode(input_frame, nullptr, &frame_types));
+}
+
+TEST_F(TestSimulcastEncoderAdapterFake, TestInitFailureCleansUpEncoders) {
+ TestVp8Simulcast::DefaultSettings(
+ &codec_, static_cast<const int*>(kTestTemporalLayerProfile));
+ codec_.VP8()->tl_factory = &tl_factory_;
+ codec_.numberOfSimulcastStreams = 3;
+ helper_->factory()->set_init_encode_return_value(
+ WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
+ adapter_->InitEncode(&codec_, 1, 1200));
+ EXPECT_TRUE(helper_->factory()->encoders().empty());
+}
+
+} // namespace testing
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/simulcast_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/simulcast_unittest.cc
new file mode 100644
index 0000000000..1433c5c335
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/simulcast_unittest.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "media/engine/simulcast.h"
+#include "test/gtest.h"
+
+namespace cricket {
+
// Fixture that inherits (protected) from ScreenshareLayerConfig so the tests
// can call FromFieldTrialGroup() directly on parse-failure cases.
class ScreenshareLayerConfigTest : public testing::Test,
                                   protected ScreenshareLayerConfig {
 public:
  ScreenshareLayerConfigTest() : ScreenshareLayerConfig(0, 0) {}

  // Asserts that |group| is rejected by the field-trial parser, leaving the
  // caller-provided default config in place.
  void ExpectParsingFails(const std::string& group) {
    ScreenshareLayerConfig config(100, 1000);
    EXPECT_FALSE(FromFieldTrialGroup(group, &config));
  }
};
+
// An empty field-trial group string must not override the defaults.
TEST_F(ScreenshareLayerConfigTest, UsesDefaultBitrateConfigForDefaultGroup) {
  ExpectParsingFails("");
}
+
// Malformed or out-of-range "tl0-tl1" bitrate strings must all be rejected.
TEST_F(ScreenshareLayerConfigTest, UsesDefaultConfigForInvalidBitrates) {
  ExpectParsingFails("-");
  ExpectParsingFails("1-");
  ExpectParsingFails("-1");
  ExpectParsingFails("-12");
  ExpectParsingFails("12-");
  ExpectParsingFails("booh!");
  ExpectParsingFails("1-b");
  ExpectParsingFails("a-2");
  ExpectParsingFails("49-1000");
  ExpectParsingFails("50-6001");
  ExpectParsingFails("100-99");
  ExpectParsingFails("1002003004005006-99");
  ExpectParsingFails("99-1002003004005006");
}
+
// A well-formed "tl0-tl1" group string is parsed into the two layer bitrates.
TEST_F(ScreenshareLayerConfigTest, ParsesValidBitrateConfig) {
  ScreenshareLayerConfig config(100, 1000);
  EXPECT_TRUE(ScreenshareLayerConfig::FromFieldTrialGroup("101-1001", &config));
  EXPECT_EQ(101, config.tl0_bitrate_kbps);
  EXPECT_EQ(1001, config.tl1_bitrate_kbps);
}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.cc b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.cc
new file mode 100644
index 0000000000..87fb62239a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.cc
@@ -0,0 +1,141 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/videodecodersoftwarefallbackwrapper.h"
+
+#include <string>
+#include <utility>
+
+#include "media/engine/internaldecoderfactory.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+
// Takes ownership of both decoders. Decoding starts on the HW decoder; the
// composite implementation name is precomputed for ImplementationName().
VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper(
    std::unique_ptr<VideoDecoder> sw_fallback_decoder,
    std::unique_ptr<VideoDecoder> hw_decoder)
    : use_hw_decoder_(true),
      hw_decoder_(std::move(hw_decoder)),
      hw_decoder_initialized_(false),
      fallback_decoder_(std::move(sw_fallback_decoder)),
      fallback_implementation_name_(
          std::string(fallback_decoder_->ImplementationName()) +
          " (fallback from: " + hw_decoder_->ImplementationName() + ")"),
      callback_(nullptr) {}
+
// Initializes the HW decoder; if that fails, tries to bring up the software
// fallback instead. Settings are stored so Decode() can later re-init the HW
// decoder or initialize the fallback on demand.
int32_t VideoDecoderSoftwareFallbackWrapper::InitDecode(
    const VideoCodec* codec_settings,
    int32_t number_of_cores) {
  // Always try to use the HW decoder in this state.
  use_hw_decoder_ = true;
  codec_settings_ = *codec_settings;
  number_of_cores_ = number_of_cores;
  int32_t ret = hw_decoder_->InitDecode(codec_settings, number_of_cores);
  if (ret == WEBRTC_VIDEO_CODEC_OK) {
    hw_decoder_initialized_ = true;
    return ret;
  }
  hw_decoder_initialized_ = false;

  // Try to initialize fallback decoder.
  if (InitFallbackDecoder())
    return WEBRTC_VIDEO_CODEC_OK;

  // Neither decoder could be initialized; surface the HW error code.
  return ret;
}
+
// Initializes |fallback_decoder_| with the stored settings and, on success,
// switches decoding over to it. On failure, stays on the HW decoder and
// returns false.
bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() {
  RTC_LOG(LS_WARNING) << "Decoder falling back to software decoding.";
  if (fallback_decoder_->InitDecode(&codec_settings_, number_of_cores_) !=
      WEBRTC_VIDEO_CODEC_OK) {
    RTC_LOG(LS_ERROR) << "Failed to initialize software-decoder fallback.";
    use_hw_decoder_ = true;
    return false;
  }
  // Re-register the existing callback so decoded frames from the fallback
  // reach the same sink.
  if (callback_)
    fallback_decoder_->RegisterDecodeCompleteCallback(callback_);
  use_hw_decoder_ = false;
  return true;
}
+
// Decodes with the HW decoder when active; on keyframes it retries the HW
// decoder even while the fallback is active, allowing recovery back to HW.
// A WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE return from the HW decoder triggers
// (or continues) software fallback.
int32_t VideoDecoderSoftwareFallbackWrapper::Decode(
    const EncodedImage& input_image,
    bool missing_frames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codec_specific_info,
    int64_t render_time_ms) {
  TRACE_EVENT0("webrtc", "VideoDecoderSoftwareFallbackWrapper::Decode");
  // Try initializing and decoding with the provided decoder on every keyframe
  // or when there's no fallback decoder. This is the normal case.
  if (use_hw_decoder_ || input_image._frameType == kVideoFrameKey) {
    int32_t ret = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    // Try reinitializing the decoder if it had failed before.
    if (!hw_decoder_initialized_) {
      hw_decoder_initialized_ =
          hw_decoder_->InitDecode(&codec_settings_, number_of_cores_) ==
          WEBRTC_VIDEO_CODEC_OK;
    }
    if (hw_decoder_initialized_) {
      ret = hw_decoder_->Decode(input_image, missing_frames, fragmentation,
                                codec_specific_info, render_time_ms);
    }
    if (ret == WEBRTC_VIDEO_CODEC_OK) {
      if (!use_hw_decoder_) {
        // Decode OK -> stop using fallback decoder.
        RTC_LOG(LS_WARNING)
            << "Decode OK, no longer using the software fallback decoder.";
        use_hw_decoder_ = true;
        return WEBRTC_VIDEO_CODEC_OK;
      }
    }
    // Any error other than an explicit fallback request is returned as-is.
    if (ret != WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE)
      return ret;
    if (use_hw_decoder_) {
      // Try to initialize fallback decoder.
      if (!InitFallbackDecoder())
        return ret;
    }
  }
  // Fallback path: decode with the software decoder.
  return fallback_decoder_->Decode(input_image, missing_frames, fragmentation,
                                   codec_specific_info, render_time_ms);
}
+
// Stores the callback and registers it on the HW decoder; also registers on
// the fallback when it is the active decoder.
int32_t VideoDecoderSoftwareFallbackWrapper::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  int32_t ret = hw_decoder_->RegisterDecodeCompleteCallback(callback);
  if (!use_hw_decoder_)
    return fallback_decoder_->RegisterDecodeCompleteCallback(callback);
  return ret;
}
+
// Releases the fallback decoder if it is active, then always releases the HW
// decoder; a later InitDecode()/Decode() may re-initialize it.
int32_t VideoDecoderSoftwareFallbackWrapper::Release() {
  if (!use_hw_decoder_) {
    RTC_LOG(LS_INFO) << "Releasing software fallback decoder.";
    fallback_decoder_->Release();
  }
  hw_decoder_initialized_ = false;
  return hw_decoder_->Release();
}
+
// Delegates to whichever decoder is currently active.
bool VideoDecoderSoftwareFallbackWrapper::PrefersLateDecoding() const {
  return use_hw_decoder_ ? hw_decoder_->PrefersLateDecoding()
                         : fallback_decoder_->PrefersLateDecoding();
}
+
// Reports the HW decoder's name, or the precomputed composite
// "<sw> (fallback from: <hw>)" name while the fallback is active.
const char* VideoDecoderSoftwareFallbackWrapper::ImplementationName() const {
  return use_hw_decoder_ ? hw_decoder_->ImplementationName()
                         : fallback_implementation_name_.c_str();
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.h b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.h
new file mode 100644
index 0000000000..97953ddc82
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_VIDEODECODERSOFTWAREFALLBACKWRAPPER_H_
+#define MEDIA_ENGINE_VIDEODECODERSOFTWAREFALLBACKWRAPPER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/video_codecs/video_decoder.h"
+
+namespace webrtc {
+
// Class used to wrap external VideoDecoders to provide a fallback option on
// software decoding when a hardware decoder fails to decode a stream due to
// hardware restrictions, such as max resolution.
class VideoDecoderSoftwareFallbackWrapper : public VideoDecoder {
 public:
  // Takes ownership of both decoders; |hw_decoder| is used until it requests
  // software fallback.
  VideoDecoderSoftwareFallbackWrapper(
      std::unique_ptr<VideoDecoder> sw_fallback_decoder,
      std::unique_ptr<VideoDecoder> hw_decoder);

  int32_t InitDecode(const VideoCodec* codec_settings,
                     int32_t number_of_cores) override;

  int32_t Decode(const EncodedImage& input_image,
                 bool missing_frames,
                 const RTPFragmentationHeader* fragmentation,
                 const CodecSpecificInfo* codec_specific_info,
                 int64_t render_time_ms) override;

  int32_t RegisterDecodeCompleteCallback(
      DecodedImageCallback* callback) override;

  int32_t Release() override;
  bool PrefersLateDecoding() const override;

  const char* ImplementationName() const override;

 private:
  // Initializes the fallback decoder and switches to it; see .cc for details.
  bool InitFallbackDecoder();

  // Determines if we are trying to use the HW or SW decoder.
  bool use_hw_decoder_;
  std::unique_ptr<VideoDecoder> hw_decoder_;
  // True while |hw_decoder_| has a successful InitDecode outstanding.
  bool hw_decoder_initialized_;

  // Settings stored from InitDecode for later (re-)initializations.
  VideoCodec codec_settings_;
  int32_t number_of_cores_;
  const std::unique_ptr<VideoDecoder> fallback_decoder_;
  // "<sw name> (fallback from: <hw name>)", precomputed in the constructor.
  const std::string fallback_implementation_name_;
  DecodedImageCallback* callback_;
};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_VIDEODECODERSOFTWAREFALLBACKWRAPPER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper_unittest.cc
new file mode 100644
index 0000000000..367527eff7
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/videodecodersoftwarefallbackwrapper.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
// Fixture wrapping a counting fake "HW" decoder with a real VP8 software
// fallback, so tests can observe which decoder the wrapper forwards calls to.
class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test {
 protected:
  VideoDecoderSoftwareFallbackWrapperTest()
      : fake_decoder_(new CountingFakeDecoder()),
        fallback_wrapper_(std::unique_ptr<VideoDecoder>(VP8Decoder::Create()),
                          std::unique_ptr<VideoDecoder>(fake_decoder_)) {}

  // Fake decoder that counts calls and returns configurable error codes.
  class CountingFakeDecoder : public VideoDecoder {
   public:
    int32_t InitDecode(const VideoCodec* codec_settings,
                       int32_t number_of_cores) override {
      ++init_decode_count_;
      return init_decode_return_code_;
    }

    int32_t Decode(const EncodedImage& input_image,
                   bool missing_frames,
                   const RTPFragmentationHeader* fragmentation,
                   const CodecSpecificInfo* codec_specific_info,
                   int64_t render_time_ms) override {
      ++decode_count_;
      return decode_return_code_;
    }

    int32_t RegisterDecodeCompleteCallback(
        DecodedImageCallback* callback) override {
      decode_complete_callback_ = callback;
      return WEBRTC_VIDEO_CODEC_OK;
    }

    int32_t Release() override {
      ++release_count_;
      return WEBRTC_VIDEO_CODEC_OK;
    }

    const char* ImplementationName() const override {
      return "fake-decoder";
    }

    int init_decode_count_ = 0;
    int decode_count_ = 0;
    int32_t init_decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
    int32_t decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
    DecodedImageCallback* decode_complete_callback_ = nullptr;
    int release_count_ = 0;
    int reset_count_ = 0;
  };
  // |fake_decoder_| is owned and released by |fallback_wrapper_|.
  CountingFakeDecoder* fake_decoder_;
  VideoDecoderSoftwareFallbackWrapper fallback_wrapper_;
};
+
// A successfully initialized HW decoder is used directly and not re-inited.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest, InitializesDecoder) {
  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);
  EXPECT_EQ(1, fake_decoder_->init_decode_count_);

  EncodedImage encoded_image;
  encoded_image._frameType = kVideoFrameKey;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(1, fake_decoder_->init_decode_count_)
      << "Initialized decoder should not be reinitialized.";
  EXPECT_EQ(1, fake_decoder_->decode_count_);
}
+
// Any InitDecode failure (not only FALLBACK_SOFTWARE) routes decoding to the
// software fallback; the HW decoder is still retried on keyframes.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
       UsesFallbackDecoderAfterAnyInitDecodeFailure) {
  VideoCodec codec = {};
  fake_decoder_->init_decode_return_code_ =
      WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  fallback_wrapper_.InitDecode(&codec, 2);
  EXPECT_EQ(1, fake_decoder_->init_decode_count_);

  EncodedImage encoded_image;
  encoded_image._frameType = kVideoFrameKey;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(2, fake_decoder_->init_decode_count_)
      << "Should have attempted reinitializing the fallback decoder on "
         "keyframe.";
  // Unfortunately faking a VP8 frame is hard. Rely on no Decode -> using SW
  // decoder.
  EXPECT_EQ(0, fake_decoder_->decode_count_)
      << "Decoder used even though no InitDecode had succeeded.";
}
+
// After falling back, a successful HW decode on a keyframe switches decoding
// back to the HW decoder, which then also handles subsequent delta frames.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
       CanRecoverFromSoftwareFallback) {
  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);
  // Unfortunately faking a VP8 frame is hard. Rely on no Decode -> using SW
  // decoder.
  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  EncodedImage encoded_image;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(1, fake_decoder_->decode_count_);

  // Fail -> fake_decoder shouldn't be used anymore.
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(1, fake_decoder_->decode_count_)
      << "Decoder used even though fallback should be active.";

  // Should be able to recover on a keyframe.
  encoded_image._frameType = kVideoFrameKey;
  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(2, fake_decoder_->decode_count_)
      << "Wrapper did not try to decode a keyframe using registered decoder.";

  encoded_image._frameType = kVideoFrameDelta;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(3, fake_decoder_->decode_count_)
      << "Decoder not used on future delta frames.";
}
+
// Plain decode errors are forwarded without triggering software fallback;
// only WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE does that.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest, DoesNotFallbackOnEveryError) {
  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);
  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
  EncodedImage encoded_image;
  EXPECT_EQ(
      fake_decoder_->decode_return_code_,
      fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1));
  EXPECT_EQ(1, fake_decoder_->decode_count_);

  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(2, fake_decoder_->decode_count_)
      << "Decoder should be active even though previous decode failed.";
}
+
// Release() reaches the wrapped HW decoder both before and during fallback,
// but entering fallback by itself does not release it.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsReleaseCall) {
  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);
  fallback_wrapper_.Release();
  EXPECT_EQ(1, fake_decoder_->release_count_);

  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  EncodedImage encoded_image;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  EXPECT_EQ(1, fake_decoder_->release_count_)
      << "Decoder should not be released during fallback.";
  fallback_wrapper_.Release();
  EXPECT_EQ(2, fake_decoder_->release_count_);
}
+
// TODO(pbos): Fake a VP8 frame well enough to actually receive a callback from
// the software decoder.
// The decode-complete callback is forwarded to the wrapped decoder both
// before and after software fallback has been activated.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
       ForwardsRegisterDecodeCompleteCallback) {
  class FakeDecodedImageCallback : public DecodedImageCallback {
    int32_t Decoded(VideoFrame& decodedImage) override { return 0; }
    int32_t Decoded(
        webrtc::VideoFrame& decodedImage, int64_t decode_time_ms) override {
      RTC_NOTREACHED();
      return -1;
    }
    void Decoded(webrtc::VideoFrame& decodedImage,
                 rtc::Optional<int32_t> decode_time_ms,
                 rtc::Optional<uint8_t> qp) override {
      RTC_NOTREACHED();
    }
  } callback, callback2;

  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);
  fallback_wrapper_.RegisterDecodeCompleteCallback(&callback);
  EXPECT_EQ(&callback, fake_decoder_->decode_complete_callback_);

  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  EncodedImage encoded_image;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  fallback_wrapper_.RegisterDecodeCompleteCallback(&callback2);
  EXPECT_EQ(&callback2, fake_decoder_->decode_complete_callback_);
}
+
// While fallback is active, ImplementationName() reports the composite
// "<sw> (fallback from: <hw>)" string.
TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
       ReportsFallbackImplementationName) {
  VideoCodec codec = {};
  fallback_wrapper_.InitDecode(&codec, 2);

  fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  EncodedImage encoded_image;
  fallback_wrapper_.Decode(encoded_image, false, nullptr, nullptr, -1);
  // Hard coded expected value since libvpx is the software implementation name
  // for VP8. Change accordingly if the underlying implementation does.
  EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
               fallback_wrapper_.ImplementationName());
  fallback_wrapper_.Release();
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.cc b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.cc
new file mode 100644
index 0000000000..69254f5fb5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.cc
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/videoencodersoftwarefallbackwrapper.h"
+
+#include <utility>
+
+#include "media/base/h264_profile_level_id.h"
+#include "media/engine/internalencoderfactory.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/timeutils.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
namespace {
// Field trial controlling the forced VP8 software-fallback experiment.
const char kVp8ForceFallbackEncoderFieldTrial[] =
    "WebRTC-VP8-Forced-Fallback-Encoder-v2";

bool EnableForcedFallback() {
  return field_trial::IsEnabled(kVp8ForceFallbackEncoderFieldTrial);
}

// Forced fallback only applies to single-stream, single-temporal-layer VP8.
bool IsForcedFallbackPossible(const VideoCodec& codec_settings) {
  return codec_settings.codecType == kVideoCodecVP8 &&
         codec_settings.numberOfSimulcastStreams <= 1 &&
         codec_settings.VP8().numberOfTemporalLayers == 1;
}

// Parses "Enabled-<min_pixels>,<max_pixels>,<min_bps>" from the field trial
// group and writes the pixel bounds to the out-params; leaves them untouched
// on any parse or validation failure.
// NOTE(review): min_bps is parsed and validated but not exported here —
// presumably consumed by other readers of the same field trial; confirm.
void GetForcedFallbackParamsFromFieldTrialGroup(int* param_min_pixels,
                                               int* param_max_pixels,
                                               int minimum_max_pixels) {
  RTC_DCHECK(param_min_pixels);
  RTC_DCHECK(param_max_pixels);
  std::string group =
      webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial);
  if (group.empty())
    return;

  int min_pixels;
  int max_pixels;
  int min_bps;
  if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &min_pixels, &max_pixels,
             &min_bps) != 3) {
    RTC_LOG(LS_WARNING)
        << "Invalid number of forced fallback parameters provided.";
    return;
  }
  if (min_pixels <= 0 || max_pixels < minimum_max_pixels ||
      max_pixels < min_pixels || min_bps <= 0) {
    RTC_LOG(LS_WARNING) << "Invalid forced fallback parameter value provided.";
    return;
  }
  *param_min_pixels = min_pixels;
  *param_max_pixels = max_pixels;
}
}  // namespace
+
// Takes ownership of both encoders. Forced-fallback parameters are read from
// the field trial only when the experiment is enabled.
VideoEncoderSoftwareFallbackWrapper::VideoEncoderSoftwareFallbackWrapper(
    std::unique_ptr<webrtc::VideoEncoder> sw_encoder,
    std::unique_ptr<webrtc::VideoEncoder> hw_encoder)
    : number_of_cores_(0),
      max_payload_size_(0),
      rates_set_(false),
      framerate_(0),
      channel_parameters_set_(false),
      packet_loss_(0),
      rtt_(0),
      use_fallback_encoder_(false),
      encoder_(std::move(hw_encoder)),
      fallback_encoder_(std::move(sw_encoder)),
      callback_(nullptr),
      forced_fallback_possible_(EnableForcedFallback()) {
  if (forced_fallback_possible_) {
    GetForcedFallbackParamsFromFieldTrialGroup(
        &forced_fallback_.min_pixels_, &forced_fallback_.max_pixels_,
        encoder_->GetScalingSettings().min_pixels_per_frame -
            1);  // No HW below.
  }
}
+
// Initializes the SW encoder with the stored settings and, on success,
// replays the callback, rate, and channel state onto it and releases the HW
// encoder. Returns false and keeps the HW encoder on failure.
bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder() {
  RTC_LOG(LS_WARNING) << "Encoder falling back to software encoding.";

  const int ret = fallback_encoder_->InitEncode(
      &codec_settings_, number_of_cores_, max_payload_size_);
  use_fallback_encoder_ = (ret == WEBRTC_VIDEO_CODEC_OK);
  if (!use_fallback_encoder_) {
    RTC_LOG(LS_ERROR) << "Failed to initialize software-encoder fallback.";
    fallback_encoder_->Release();
    return false;
  }
  // Replay callback, rates, and channel parameters.
  if (callback_)
    fallback_encoder_->RegisterEncodeCompleteCallback(callback_);
  if (rates_set_)
    fallback_encoder_->SetRateAllocation(bitrate_allocation_, framerate_);
  if (channel_parameters_set_)
    fallback_encoder_->SetChannelParameters(packet_loss_, rtt_);

  // Since we're switching to the fallback encoder, Release the real encoder. It
  // may be re-initialized via InitEncode later, and it will continue to get
  // Set calls for rates and channel parameters in the meantime.
  encoder_->Release();
  return true;
}
+
// Stores the settings, first giving the forced-fallback paths a chance to
// (re)use the SW encoder, then initializes the HW encoder and finally tries
// the dynamic SW fallback if HW init fails.
int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode(
    const VideoCodec* codec_settings,
    int32_t number_of_cores,
    size_t max_payload_size) {
  // Store settings, in case we need to dynamically switch to the fallback
  // encoder after a failed Encode call.
  codec_settings_ = *codec_settings;
  number_of_cores_ = number_of_cores;
  max_payload_size_ = max_payload_size;
  // Clear stored rate/channel parameters.
  rates_set_ = false;
  channel_parameters_set_ = false;
  ValidateSettingsForForcedFallback();

  // Try to reinit forced software codec if it is in use.
  if (TryReInitForcedFallbackEncoder()) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  // Try to init forced software codec if it should be used.
  if (TryInitForcedFallbackEncoder()) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  forced_fallback_.active_ = false;

  int32_t ret =
      encoder_->InitEncode(codec_settings, number_of_cores, max_payload_size);
  if (ret == WEBRTC_VIDEO_CODEC_OK) {
    if (use_fallback_encoder_) {
      RTC_LOG(LS_WARNING)
          << "InitEncode OK, no longer using the software fallback encoder.";
      fallback_encoder_->Release();
      use_fallback_encoder_ = false;
    }
    if (callback_)
      encoder_->RegisterEncodeCompleteCallback(callback_);
    return ret;
  }
  // Try to instantiate software codec.
  if (InitFallbackEncoder()) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  // Software encoder failed, use original return code.
  return ret;
}
+
// Stores the callback and registers it on the HW encoder; also registers on
// the fallback when it is the active encoder.
int32_t VideoEncoderSoftwareFallbackWrapper::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  callback_ = callback;
  int32_t ret = encoder_->RegisterEncodeCompleteCallback(callback);
  if (use_fallback_encoder_)
    return fallback_encoder_->RegisterEncodeCompleteCallback(callback);
  return ret;
}
+
// Releases only the currently active encoder (the inactive one was already
// released when the switch happened).
int32_t VideoEncoderSoftwareFallbackWrapper::Release() {
  return use_fallback_encoder_ ? fallback_encoder_->Release()
                               : encoder_->Release();
}
+
// Encodes with the active encoder. A WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
// return from the HW encoder triggers a switch to the SW encoder, which then
// retries this same frame (unless it cannot accept native frame buffers).
int32_t VideoEncoderSoftwareFallbackWrapper::Encode(
    const VideoFrame& frame,
    const CodecSpecificInfo* codec_specific_info,
    const std::vector<FrameType>* frame_types) {
  if (use_fallback_encoder_)
    return fallback_encoder_->Encode(frame, codec_specific_info, frame_types);
  int32_t ret = encoder_->Encode(frame, codec_specific_info, frame_types);
  // If requested, try a software fallback.
  bool fallback_requested = (ret == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
  if (fallback_requested && InitFallbackEncoder()) {
    if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative &&
        !fallback_encoder_->SupportsNativeHandle()) {
      RTC_LOG(LS_WARNING) << "Fallback encoder doesn't support native frames, "
                          << "dropping one frame.";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    // Start using the fallback with this frame.
    return fallback_encoder_->Encode(frame, codec_specific_info, frame_types);
  }
  return ret;
}
+
// Stores the channel parameters (for replay onto a future fallback) and
// forwards them to the HW encoder and, if active, the fallback encoder.
int32_t VideoEncoderSoftwareFallbackWrapper::SetChannelParameters(
    uint32_t packet_loss,
    int64_t rtt) {
  channel_parameters_set_ = true;
  packet_loss_ = packet_loss;
  rtt_ = rtt;
  int32_t ret = encoder_->SetChannelParameters(packet_loss, rtt);
  if (use_fallback_encoder_)
    return fallback_encoder_->SetChannelParameters(packet_loss, rtt);
  return ret;
}
+
// Stores the rate allocation (for replay onto a future fallback) and forwards
// it to the HW encoder and, if active, the fallback encoder.
int32_t VideoEncoderSoftwareFallbackWrapper::SetRateAllocation(
    const BitrateAllocation& bitrate_allocation,
    uint32_t framerate) {
  rates_set_ = true;
  bitrate_allocation_ = bitrate_allocation;
  framerate_ = framerate;
  int32_t ret = encoder_->SetRateAllocation(bitrate_allocation_, framerate);
  if (use_fallback_encoder_)
    return fallback_encoder_->SetRateAllocation(bitrate_allocation_, framerate);
  return ret;
}
+
// Delegates to whichever encoder is currently active.
bool VideoEncoderSoftwareFallbackWrapper::SupportsNativeHandle() const {
  return use_fallback_encoder_ ? fallback_encoder_->SupportsNativeHandle()
                               : encoder_->SupportsNativeHandle();
}
+
// Returns the HW encoder's scaling settings, except under the forced-fallback
// experiment where the experiment's min-pixels bound is merged in (and, while
// the forced fallback is active, VP8's automatic-resize flag is used).
VideoEncoder::ScalingSettings
VideoEncoderSoftwareFallbackWrapper::GetScalingSettings() const {
  if (forced_fallback_possible_) {
    if (forced_fallback_.active_) {
      return VideoEncoder::ScalingSettings(
          codec_settings_.VP8().automaticResizeOn,
          forced_fallback_.min_pixels_);
    }
    const auto settings = encoder_->GetScalingSettings();
    if (settings.thresholds) {
      return VideoEncoder::ScalingSettings(
          settings.enabled, settings.thresholds->low, settings.thresholds->high,
          forced_fallback_.min_pixels_);
    }
    return VideoEncoder::ScalingSettings(settings.enabled,
                                         forced_fallback_.min_pixels_);
  }
  return encoder_->GetScalingSettings();
}
+
// Reports the name of whichever encoder is currently active.
const char* VideoEncoderSoftwareFallbackWrapper::ImplementationName() const {
  return use_fallback_encoder_ ? fallback_encoder_->ImplementationName()
                               : encoder_->ImplementationName();
}
+
// True only when the fallback encoder is in use because of the forced
// (field-trial) experiment, not because of a dynamic HW failure.
bool VideoEncoderSoftwareFallbackWrapper::IsForcedFallbackActive() const {
  return (forced_fallback_possible_ && use_fallback_encoder_ &&
          forced_fallback_.active_);
}
+
// Starts the forced SW fallback when the experiment is on, no fallback is in
// use yet, and the configured resolution is within the experiment's bounds.
bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() {
  if (!forced_fallback_possible_ || use_fallback_encoder_) {
    return false;
  }
  // Fallback not active.
  if (!forced_fallback_.IsValid(codec_settings_)) {
    return false;
  }
  // Settings valid, try to instantiate software codec.
  RTC_LOG(LS_INFO) << "Request forced SW encoder fallback: "
                   << codec_settings_.width << "x" << codec_settings_.height;
  if (!InitFallbackEncoder()) {
    return false;
  }
  forced_fallback_.active_ = true;
  return true;
}
+
// Re-initializes an already-active forced fallback with the new settings;
// returns false when the fallback is not active, when the new resolution
// exceeds the experiment's bounds, or when the SW re-init fails.
bool VideoEncoderSoftwareFallbackWrapper::TryReInitForcedFallbackEncoder() {
  if (!IsForcedFallbackActive()) {
    return false;
  }
  // Forced fallback active.
  if (!forced_fallback_.IsValid(codec_settings_)) {
    RTC_LOG(LS_INFO) << "Stop forced SW encoder fallback, max pixels exceeded.";
    return false;
  }
  // Settings valid, reinitialize the forced fallback encoder.
  if (fallback_encoder_->InitEncode(&codec_settings_, number_of_cores_,
                                    max_payload_size_) !=
      WEBRTC_VIDEO_CODEC_OK) {
    RTC_LOG(LS_ERROR) << "Failed to init forced SW encoder fallback.";
    return false;
  }
  return true;
}
+
// Permanently disables the forced-fallback experiment (and releases an active
// forced fallback) when the codec settings stop qualifying — e.g. non-VP8,
// simulcast, or multiple temporal layers.
void VideoEncoderSoftwareFallbackWrapper::ValidateSettingsForForcedFallback() {
  if (!forced_fallback_possible_)
    return;

  if (!IsForcedFallbackPossible(codec_settings_)) {
    if (IsForcedFallbackActive()) {
      fallback_encoder_->Release();
      use_fallback_encoder_ = false;
    }
    RTC_LOG(LS_INFO) << "Disable forced_fallback_possible_ due to settings.";
    forced_fallback_possible_ = false;
  }
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.h b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.h
new file mode 100644
index 0000000000..a9a349c221
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_VIDEOENCODERSOFTWAREFALLBACKWRAPPER_H_
+#define MEDIA_ENGINE_VIDEOENCODERSOFTWAREFALLBACKWRAPPER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/codec.h"
+
+namespace webrtc {
+
// Class used to wrap external VideoEncoders to provide a fallback option on
// software encoding when a hardware encoder fails to encode a stream due to
// hardware restrictions, such as max resolution.
class VideoEncoderSoftwareFallbackWrapper : public VideoEncoder {
 public:
  // Takes ownership of both encoders; |hw_encoder| is used until it requests
  // software fallback (or the forced-fallback field trial activates).
  VideoEncoderSoftwareFallbackWrapper(
      std::unique_ptr<webrtc::VideoEncoder> sw_encoder,
      std::unique_ptr<webrtc::VideoEncoder> hw_encoder);

  int32_t InitEncode(const VideoCodec* codec_settings,
                     int32_t number_of_cores,
                     size_t max_payload_size) override;

  int32_t RegisterEncodeCompleteCallback(
      EncodedImageCallback* callback) override;

  int32_t Release() override;
  int32_t Encode(const VideoFrame& frame,
                 const CodecSpecificInfo* codec_specific_info,
                 const std::vector<FrameType>* frame_types) override;
  int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
  int32_t SetRateAllocation(const BitrateAllocation& bitrate_allocation,
                            uint32_t framerate) override;
  bool SupportsNativeHandle() const override;
  ScalingSettings GetScalingSettings() const override;
  const char *ImplementationName() const override;

 private:
  // Initializes the SW encoder and switches to it; see .cc for details.
  bool InitFallbackEncoder();

  // If |forced_fallback_possible_| is true:
  // The forced fallback is requested if the resolution is less than or equal to
  // |max_pixels_|. The resolution is allowed to be scaled down to
  // |min_pixels_|.
  class ForcedFallbackParams {
   public:
    bool IsValid(const VideoCodec& codec) const {
      return codec.width * codec.height <= max_pixels_;
    }

    bool active_ = false;
    int min_pixels_ = 320 * 180;
    int max_pixels_ = 320 * 240;
  };

  bool TryInitForcedFallbackEncoder();
  bool TryReInitForcedFallbackEncoder();
  void ValidateSettingsForForcedFallback();
  bool IsForcedFallbackActive() const;
  // NOTE(review): declared but no definition appears in the accompanying .cc
  // in this patch — confirm it is defined elsewhere or remove.
  void MaybeModifyCodecForFallback();

  // Settings used in the last InitEncode call and used if a dynamic fallback to
  // software is required.
  VideoCodec codec_settings_;
  int32_t number_of_cores_;
  size_t max_payload_size_;

  // The last bitrate/framerate set, and a flag for noting they are set.
  bool rates_set_;
  BitrateAllocation bitrate_allocation_;
  uint32_t framerate_;

  // The last channel parameters set, and a flag for noting they are set.
  bool channel_parameters_set_;
  uint32_t packet_loss_;
  int64_t rtt_;

  // True while the SW fallback encoder is the active encoder.
  bool use_fallback_encoder_;
  const std::unique_ptr<webrtc::VideoEncoder> encoder_;

  const std::unique_ptr<webrtc::VideoEncoder> fallback_encoder_;
  EncodedImageCallback* callback_;

  // Forced-fallback (field trial) experiment state.
  bool forced_fallback_possible_;
  ForcedFallbackParams forced_fallback_;
};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_VIDEOENCODERSOFTWAREFALLBACKWRAPPER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper_unittest.cc
new file mode 100644
index 0000000000..e39b02f71c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper_unittest.cc
@@ -0,0 +1,544 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/videoencodersoftwarefallbackwrapper.h"
+
+#include <utility>
+
+#include "api/video/i420_buffer.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
+#include "modules/video_coding/codecs/vp8/temporal_layers.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/fakeclock.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+const int kWidth = 320;
+const int kHeight = 240;
+const int kNumCores = 2;
+const uint32_t kFramerate = 30;
+const size_t kMaxPayloadSize = 800;
+const int kDefaultMinPixelsPerFrame = 320 * 180;
+const int kLowThreshold = 10;
+const int kHighThreshold = 20;
+} // namespace
+
+// Test fixture that wraps a CountingFakeEncoder (standing in for a
+// "hardware" encoder) together with a real VP8 software encoder inside a
+// VideoEncoderSoftwareFallbackWrapper, so tests can force and observe
+// fallback transitions.
+class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
+ protected:
+  VideoEncoderSoftwareFallbackWrapperTest()
+      : VideoEncoderSoftwareFallbackWrapperTest("") {}
+  // |field_trials| configures webrtc field trials for the whole test via
+  // ScopedFieldTrials (used by the ForcedFallback subclasses below).
+  explicit VideoEncoderSoftwareFallbackWrapperTest(
+      const std::string& field_trials)
+      : override_field_trials_(field_trials),
+        fake_encoder_(new CountingFakeEncoder()),
+        fallback_wrapper_(std::unique_ptr<VideoEncoder>(VP8Encoder::Create()),
+                          std::unique_ptr<VideoEncoder>(fake_encoder_)) {}
+
+  // Fake encoder that counts every VideoEncoder API call and returns
+  // configurable result codes, letting tests trigger fallback on demand.
+  class CountingFakeEncoder : public VideoEncoder {
+   public:
+    int32_t InitEncode(const VideoCodec* codec_settings,
+                       int32_t number_of_cores,
+                       size_t max_payload_size) override {
+      ++init_encode_count_;
+      return init_encode_return_code_;
+    }
+    int32_t Encode(const VideoFrame& frame,
+                   const CodecSpecificInfo* codec_specific_info,
+                   const std::vector<FrameType>* frame_types) override {
+      ++encode_count_;
+      // Only deliver an encoded image when the configured return code is OK,
+      // mimicking a real encoder that produces no output on failure.
+      if (encode_complete_callback_ &&
+          encode_return_code_ == WEBRTC_VIDEO_CODEC_OK) {
+        CodecSpecificInfo info;
+        info.codec_name = ImplementationName();
+        encode_complete_callback_->OnEncodedImage(EncodedImage(), &info,
+                                                  nullptr);
+      }
+      return encode_return_code_;
+    }
+
+    int32_t RegisterEncodeCompleteCallback(
+        EncodedImageCallback* callback) override {
+      encode_complete_callback_ = callback;
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    int32_t Release() override {
+      ++release_count_;
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override {
+      ++set_channel_parameters_count_;
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    int32_t SetRateAllocation(const BitrateAllocation& bitrate_allocation,
+                              uint32_t framerate) override {
+      ++set_rates_count_;
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    bool SupportsNativeHandle() const override {
+      ++supports_native_handle_count_;
+      return supports_native_handle_;
+    }
+
+    const char* ImplementationName() const override {
+      return "fake-encoder";
+    }
+
+    VideoEncoder::ScalingSettings GetScalingSettings() const override {
+      return VideoEncoder::ScalingSettings(true, kLowThreshold, kHighThreshold);
+    }
+
+    // Call counters and configurable return codes inspected by the tests.
+    int init_encode_count_ = 0;
+    int32_t init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+    int32_t encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+    int encode_count_ = 0;
+    EncodedImageCallback* encode_complete_callback_ = nullptr;
+    int release_count_ = 0;
+    int set_channel_parameters_count_ = 0;
+    int set_rates_count_ = 0;
+    mutable int supports_native_handle_count_ = 0;
+    bool supports_native_handle_ = false;
+  };
+
+  // Records the codec name of the last encoded image delivered, so tests can
+  // tell whether the fake or the fallback (libvpx) encoder produced it.
+  class FakeEncodedImageCallback : public EncodedImageCallback {
+   public:
+    Result OnEncodedImage(
+        const EncodedImage& encoded_image,
+        const CodecSpecificInfo* codec_specific_info,
+        const RTPFragmentationHeader* fragmentation) override {
+      ++callback_count_;
+      last_codec_name_ = codec_specific_info->codec_name;
+      return Result(Result::OK, callback_count_);
+    }
+    int callback_count_ = 0;
+    std::string last_codec_name_;
+  };
+
+  // Shared test helpers; definitions follow below the fixture.
+  void UtilizeFallbackEncoder();
+  void FallbackFromEncodeRequest();
+  void EncodeFrame();
+  void EncodeFrame(int expected_ret);
+  void CheckLastEncoderName(const char* expected_name) {
+    EXPECT_STREQ(expected_name, callback_.last_codec_name_.c_str());
+  }
+
+  test::ScopedFieldTrials override_field_trials_;
+  FakeEncodedImageCallback callback_;
+  // |fake_encoder_| is owned and released by |fallback_wrapper_|.
+  CountingFakeEncoder* fake_encoder_;
+  VideoEncoderSoftwareFallbackWrapper fallback_wrapper_;
+  VideoCodec codec_ = {};
+  std::unique_ptr<VideoFrame> frame_;
+  std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
+};
+
+// Encodes one frame, expecting the encode call to succeed.
+void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
+  EncodeFrame(WEBRTC_VIDEO_CODEC_OK);
+}
+
+// Builds a black key frame at the current codec resolution, encodes it via
+// the wrapper, and checks that Encode() returns |expected_ret|.
+void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame(int expected_ret) {
+  rtc::scoped_refptr<I420Buffer> buffer =
+      I420Buffer::Create(codec_.width, codec_.height);
+  I420Buffer::SetBlack(buffer);
+  std::vector<FrameType> types(1, kVideoFrameKey);
+
+  frame_.reset(new VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
+  EXPECT_EQ(expected_ret, fallback_wrapper_.Encode(*frame_, nullptr, &types));
+}
+
+// Drives the wrapper into fallback at InitEncode time: the fake encoder is
+// configured to fail initialization, so the wrapper must succeed by using
+// the VP8 software fallback instead. Verifies the fallback (not the fake
+// encoder) handles the subsequent Encode().
+void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() {
+  fallback_wrapper_.RegisterEncodeCompleteCallback(&callback_);
+  EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_);
+
+  // Register with failing fake encoder. Should succeed with VP8 fallback.
+  codec_.codecType = kVideoCodecVP8;
+  codec_.maxFramerate = kFramerate;
+  codec_.width = kWidth;
+  codec_.height = kHeight;
+  codec_.VP8()->numberOfTemporalLayers = 1;
+  std::unique_ptr<TemporalLayersFactory> tl_factory(
+      new TemporalLayersFactory());
+  codec_.VP8()->tl_factory = tl_factory.get();
+  rate_allocator_.reset(
+      new SimulcastRateAllocator(codec_, std::move(tl_factory)));
+
+  fake_encoder_->init_encode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            fallback_wrapper_.InitEncode(&codec_, kNumCores, kMaxPayloadSize));
+  EXPECT_EQ(
+      WEBRTC_VIDEO_CODEC_OK,
+      fallback_wrapper_.SetRateAllocation(
+          rate_allocator_->GetAllocation(300000, kFramerate), kFramerate));
+
+  // The fake encoder must not be hit; the callback must still fire once.
+  int callback_count = callback_.callback_count_;
+  int encode_count = fake_encoder_->encode_count_;
+  EncodeFrame();
+  EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+  EXPECT_EQ(callback_count + 1, callback_.callback_count_);
+}
+
+// Drives the wrapper into fallback at Encode time: InitEncode succeeds on
+// the fake encoder, but the first Encode() returns
+// WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE, which must make the wrapper switch
+// to the software encoder and still deliver an encoded image.
+void VideoEncoderSoftwareFallbackWrapperTest::FallbackFromEncodeRequest() {
+  fallback_wrapper_.RegisterEncodeCompleteCallback(&callback_);
+  codec_.codecType = kVideoCodecVP8;
+  codec_.maxFramerate = kFramerate;
+  codec_.width = kWidth;
+  codec_.height = kHeight;
+  codec_.VP8()->numberOfTemporalLayers = 1;
+  std::unique_ptr<TemporalLayersFactory> tl_factory(
+      new TemporalLayersFactory());
+  codec_.VP8()->tl_factory = tl_factory.get();
+  rate_allocator_.reset(
+      new SimulcastRateAllocator(codec_, std::move(tl_factory)));
+  fallback_wrapper_.InitEncode(&codec_, 2, kMaxPayloadSize);
+  EXPECT_EQ(
+      WEBRTC_VIDEO_CODEC_OK,
+      fallback_wrapper_.SetRateAllocation(
+          rate_allocator_->GetAllocation(300000, kFramerate), kFramerate));
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+
+  // Have the non-fallback encoder request a software fallback.
+  fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  int callback_count = callback_.callback_count_;
+  int encode_count = fake_encoder_->encode_count_;
+  EncodeFrame();
+  // Single encode request, which returned failure.
+  EXPECT_EQ(encode_count + 1, fake_encoder_->encode_count_);
+  EXPECT_EQ(callback_count + 1, callback_.callback_count_);
+}
+
+// Basic wrapper behavior: initialization, fallback triggering, and
+// correct forwarding (or suppression) of calls while fallback is active.
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, InitializesEncoder) {
+  VideoCodec codec = {};
+  fallback_wrapper_.InitEncode(&codec, 2, kMaxPayloadSize);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, EncodeRequestsFallback) {
+  FallbackFromEncodeRequest();
+  // After fallback, further encodes shouldn't hit the fake encoder.
+  int encode_count = fake_encoder_->encode_count_;
+  EncodeFrame();
+  EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, CanUtilizeFallbackEncoder) {
+  UtilizeFallbackEncoder();
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       InternalEncoderReleasedDuringFallback) {
+  EXPECT_EQ(0, fake_encoder_->release_count_);
+  UtilizeFallbackEncoder();
+  EXPECT_EQ(1, fake_encoder_->release_count_);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+  // No extra release when the fallback is released.
+  EXPECT_EQ(1, fake_encoder_->release_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       InternalEncoderNotEncodingDuringFallback) {
+  UtilizeFallbackEncoder();
+  int encode_count = fake_encoder_->encode_count_;
+  EncodeFrame();
+  EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       CanRegisterCallbackWhileUsingFallbackEncoder) {
+  UtilizeFallbackEncoder();
+  // Registering an encode-complete callback should still work when fallback
+  // encoder is being used.
+  FakeEncodedImageCallback callback2;
+  fallback_wrapper_.RegisterEncodeCompleteCallback(&callback2);
+  EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_);
+
+  // Encoding a frame using the fallback should arrive at the new callback.
+  std::vector<FrameType> types(1, kVideoFrameKey);
+  frame_->set_timestamp(frame_->timestamp() + 1000);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            fallback_wrapper_.Encode(*frame_, nullptr, &types));
+
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       SetChannelParametersForwardedDuringFallback) {
+  UtilizeFallbackEncoder();
+  EXPECT_EQ(0, fake_encoder_->set_channel_parameters_count_);
+  fallback_wrapper_.SetChannelParameters(1, 1);
+  EXPECT_EQ(1, fake_encoder_->set_channel_parameters_count_);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       SetRatesForwardedDuringFallback) {
+  UtilizeFallbackEncoder();
+  EXPECT_EQ(1, fake_encoder_->set_rates_count_);
+  fallback_wrapper_.SetRateAllocation(BitrateAllocation(), 1);
+  EXPECT_EQ(2, fake_encoder_->set_rates_count_);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       SupportsNativeHandleForwardedWithoutFallback) {
+  fallback_wrapper_.SupportsNativeHandle();
+  EXPECT_EQ(1, fake_encoder_->supports_native_handle_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       SupportsNativeHandleNotForwardedDuringFallback) {
+  UtilizeFallbackEncoder();
+  fallback_wrapper_.SupportsNativeHandle();
+  EXPECT_EQ(0, fake_encoder_->supports_native_handle_count_);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, ReportsImplementationName) {
+  codec_.width = kWidth;
+  codec_.height = kHeight;
+  fallback_wrapper_.RegisterEncodeCompleteCallback(&callback_);
+  fallback_wrapper_.InitEncode(&codec_, kNumCores, kMaxPayloadSize);
+  EncodeFrame();
+  CheckLastEncoderName("fake-encoder");
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+       ReportsFallbackImplementationName) {
+  UtilizeFallbackEncoder();
+  // Hard coded expected value since libvpx is the software implementation name
+  // for VP8. Change accordingly if the underlying implementation does.
+  CheckLastEncoderName("libvpx");
+}
+
+namespace {
+const int kBitrateKbps = 200;
+const int kMinPixelsPerFrame = 1;
+const char kFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2";
+} // namespace
+
+// Fixture for the forced-fallback field trial: configures a VP8 codec and
+// provides helpers to (re)initialize the encoder at a given resolution and
+// verify which implementation produced the last frame.
+class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTest {
+ public:
+  explicit ForcedFallbackTest(const std::string& field_trials)
+      : VideoEncoderSoftwareFallbackWrapperTest(field_trials) {}
+
+  ~ForcedFallbackTest() override {}
+
+ protected:
+  void SetUp() override {
+    clock_.SetTimeMicros(1234);
+    ConfigureVp8Codec();
+  }
+
+  void TearDown() override {
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
+  }
+
+  // Sets up |codec_| as single-temporal-layer VP8 with resize and frame
+  // dropping on, and creates a matching rate allocator.
+  void ConfigureVp8Codec() {
+    fallback_wrapper_.RegisterEncodeCompleteCallback(&callback_);
+    std::unique_ptr<TemporalLayersFactory> tl_factory(
+        new TemporalLayersFactory());
+    codec_.codecType = kVideoCodecVP8;
+    codec_.maxFramerate = kFramerate;
+    codec_.width = kWidth;
+    codec_.height = kHeight;
+    codec_.VP8()->numberOfTemporalLayers = 1;
+    codec_.VP8()->automaticResizeOn = true;
+    codec_.VP8()->frameDroppingOn = true;
+    codec_.VP8()->tl_factory = tl_factory.get();
+    rate_allocator_.reset(
+        new SimulcastRateAllocator(codec_, std::move(tl_factory)));
+  }
+
+  // Initializes the wrapper at |width| x |height| and applies the default
+  // bitrate allocation; expects success.
+  void InitEncode(int width, int height) {
+    codec_.width = width;
+    codec_.height = height;
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.InitEncode(
+                                         &codec_, kNumCores, kMaxPayloadSize));
+    SetRateAllocation(kBitrateKbps);
+  }
+
+  void SetRateAllocation(uint32_t bitrate_kbps) {
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.SetRateAllocation(
+                                         rate_allocator_->GetAllocation(
+                                             bitrate_kbps * 1000, kFramerate),
+                                         kFramerate));
+  }
+
+  void EncodeFrameAndVerifyLastName(const char* expected_name) {
+    EncodeFrameAndVerifyLastName(expected_name, WEBRTC_VIDEO_CODEC_OK);
+  }
+
+  // Encodes a frame, checks Encode()'s return value, and asserts which
+  // encoder implementation ("fake-encoder" or "libvpx") produced it.
+  void EncodeFrameAndVerifyLastName(const char* expected_name,
+                                    int expected_ret) {
+    EncodeFrame(expected_ret);
+    CheckLastEncoderName(expected_name);
+  }
+
+  rtc::ScopedFakeClock clock_;
+};
+
+// Enables the forced-fallback field trial with min pixels
+// kMinPixelsPerFrame, max pixels kWidth*kHeight, and a 30000 kbps limit.
+class ForcedFallbackTestEnabled : public ForcedFallbackTest {
+ public:
+  ForcedFallbackTestEnabled()
+      : ForcedFallbackTest(std::string(kFieldTrial) + "/Enabled-" +
+                           std::to_string(kMinPixelsPerFrame) + "," +
+                           std::to_string(kWidth * kHeight) + ",30000/") {}
+};
+
+// Explicitly disables the forced-fallback field trial.
+class ForcedFallbackTestDisabled : public ForcedFallbackTest {
+ public:
+  ForcedFallbackTestDisabled()
+      : ForcedFallbackTest(std::string(kFieldTrial) + "/Disabled/") {}
+};
+
+// Forced-fallback activation/deactivation: fallback engages only when the
+// field trial is enabled and the resolution is at or below the configured
+// maximum, and disengages on larger resolutions or invalid settings.
+TEST_F(ForcedFallbackTestDisabled, NoFallbackWithoutFieldTrial) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIfAtMaxResolutionLimit) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsKeptWhenInitEncodeIsCalled) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+
+  // Re-initialize encoder, still expect fallback.
+  InitEncode(kWidth / 2, kHeight / 2);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);  // No change.
+  EncodeFrameAndVerifyLastName("libvpx");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsEndedWhenResolutionIsTooLarge) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+
+  // Re-initialize encoder with a larger resolution, expect no fallback.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(2, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsEndedForNonValidSettings) {
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+
+  // Re-initialize encoder with invalid setting, expect no fallback.
+  codec_.VP8()->numberOfTemporalLayers = 2;
+  InitEncode(kWidth, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Re-initialize encoder with valid setting but fallback disabled from now on.
+  codec_.VP8()->numberOfTemporalLayers = 1;
+  InitEncode(kWidth, kHeight);
+  EXPECT_EQ(2, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+}
+
+TEST_F(ForcedFallbackTestEnabled, MultipleStartEndFallback) {
+  const int kNumRuns = 5;
+  for (int i = 1; i <= kNumRuns; ++i) {
+    // Resolution at max threshold.
+    InitEncode(kWidth, kHeight);
+    EncodeFrameAndVerifyLastName("libvpx");
+    // Resolution above max threshold.
+    InitEncode(kWidth + 1, kHeight);
+    EXPECT_EQ(i, fake_encoder_->init_encode_count_);
+    EncodeFrameAndVerifyLastName("fake-encoder");
+  }
+}
+
+// GetScalingSettings behavior under the forced-fallback field trial: the
+// configured min-pixels value is used when the trial is enabled, and scaling
+// is disabled entirely when VP8 automatic resize is off.
+TEST_F(ForcedFallbackTestDisabled, GetScaleSettings) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Default min pixels per frame should be used.
+  const auto settings = fallback_wrapper_.GetScalingSettings();
+  EXPECT_TRUE(settings.enabled);
+  EXPECT_EQ(kDefaultMinPixelsPerFrame, settings.min_pixels_per_frame);
+}
+
+TEST_F(ForcedFallbackTestEnabled, GetScaleSettingsWithNoFallback) {
+  // Resolution above max threshold.
+  InitEncode(kWidth + 1, kHeight);
+  EncodeFrameAndVerifyLastName("fake-encoder");
+
+  // Configured min pixels per frame should be used.
+  const auto settings = fallback_wrapper_.GetScalingSettings();
+  EXPECT_TRUE(settings.enabled);
+  EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame);
+  ASSERT_TRUE(settings.thresholds);
+  EXPECT_EQ(kLowThreshold, settings.thresholds->low);
+  EXPECT_EQ(kHighThreshold, settings.thresholds->high);
+}
+
+TEST_F(ForcedFallbackTestEnabled, GetScaleSettingsWithFallback) {
+  // Resolution at max threshold.
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+
+  // Configured min pixels per frame should be used.
+  const auto settings = fallback_wrapper_.GetScalingSettings();
+  EXPECT_TRUE(settings.enabled);
+  EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame);
+}
+
+TEST_F(ForcedFallbackTestEnabled, ScalingDisabledIfResizeOff) {
+  // Resolution at max threshold.
+  codec_.VP8()->automaticResizeOn = false;
+  InitEncode(kWidth, kHeight);
+  EncodeFrameAndVerifyLastName("libvpx");
+
+  // Should be disabled for automatic resize off.
+  const auto settings = fallback_wrapper_.GetScalingSettings();
+  EXPECT_FALSE(settings.enabled);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.cc b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.cc
new file mode 100644
index 0000000000..b97e74e5ef
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.cc
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/vp8_encoder_simulcast_proxy.h"
+
+#include "media/engine/scopedvideoencoder.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+// Creates the default VP8 encoder via |factory|; a SimulcastEncoderAdapter
+// is substituted later only if that encoder rejects simulcast settings.
+VP8EncoderSimulcastProxy::VP8EncoderSimulcastProxy(VideoEncoderFactory* factory)
+    : factory_(factory), callback_(nullptr) {
+  encoder_ = factory_->CreateVideoEncoder(SdpVideoFormat("VP8"));
+}
+
+VP8EncoderSimulcastProxy::~VP8EncoderSimulcastProxy() {}
+
+int VP8EncoderSimulcastProxy::Release() {
+  return encoder_->Release();
+}
+
+// Tries the current encoder first; on
+// WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED it swaps in a
+// SimulcastEncoderAdapter (re-registering any callback) and retries once.
+int VP8EncoderSimulcastProxy::InitEncode(const VideoCodec* inst,
+                                         int number_of_cores,
+                                         size_t max_payload_size) {
+  int ret = encoder_->InitEncode(inst, number_of_cores, max_payload_size);
+  if (ret == WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED) {
+    encoder_.reset(new SimulcastEncoderAdapter(factory_));
+    if (callback_) {
+      encoder_->RegisterEncodeCompleteCallback(callback_);
+    }
+    ret = encoder_->InitEncode(inst, number_of_cores, max_payload_size);
+  }
+  return ret;
+}
+
+int VP8EncoderSimulcastProxy::Encode(
+    const VideoFrame& input_image,
+    const CodecSpecificInfo* codec_specific_info,
+    const std::vector<FrameType>* frame_types) {
+  return encoder_->Encode(input_image, codec_specific_info, frame_types);
+}
+
+// |callback| is also remembered so it can be re-registered if InitEncode
+// later replaces the underlying encoder.
+int VP8EncoderSimulcastProxy::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  callback_ = callback;
+  return encoder_->RegisterEncodeCompleteCallback(callback);
+}
+
+int VP8EncoderSimulcastProxy::SetChannelParameters(uint32_t packet_loss,
+                                                   int64_t rtt) {
+  return encoder_->SetChannelParameters(packet_loss, rtt);
+}
+
+int VP8EncoderSimulcastProxy::SetRateAllocation(
+    const BitrateAllocation& bitrate,
+    uint32_t new_framerate) {
+  return encoder_->SetRateAllocation(bitrate, new_framerate);
+}
+
+VideoEncoder::ScalingSettings VP8EncoderSimulcastProxy::GetScalingSettings()
+    const {
+  return encoder_->GetScalingSettings();
+}
+
+int32_t VP8EncoderSimulcastProxy::SetPeriodicKeyFrames(bool enable) {
+  return encoder_->SetPeriodicKeyFrames(enable);
+}
+
+bool VP8EncoderSimulcastProxy::SupportsNativeHandle() const {
+  return encoder_->SupportsNativeHandle();
+}
+
+const char* VP8EncoderSimulcastProxy::ImplementationName() const {
+  return encoder_->ImplementationName();
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.h b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.h
new file mode 100644
index 0000000000..5833124c50
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef MEDIA_ENGINE_VP8_ENCODER_SIMULCAST_PROXY_H_
+#define MEDIA_ENGINE_VP8_ENCODER_SIMULCAST_PROXY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+namespace webrtc {
+
+// This class provides fallback to SimulcastEncoderAdapter if default VP8Encoder
+// doesn't support simulcast for provided settings.
+// This class provides fallback to SimulcastEncoderAdapter if default VP8Encoder
+// doesn't support simulcast for provided settings. All VideoEncoder calls are
+// forwarded to the currently active underlying encoder.
+class VP8EncoderSimulcastProxy : public VP8Encoder {
+ public:
+  // |factory| must outlive this proxy; it is used to create the default
+  // encoder and, if needed, the per-stream encoders of the adapter.
+  explicit VP8EncoderSimulcastProxy(VideoEncoderFactory* factory);
+  virtual ~VP8EncoderSimulcastProxy();
+
+  // Implements VideoEncoder.
+  int Release() override;
+  int InitEncode(const VideoCodec* inst,
+                 int number_of_cores,
+                 size_t max_payload_size) override;
+  int Encode(const VideoFrame& input_image,
+             const CodecSpecificInfo* codec_specific_info,
+             const std::vector<FrameType>* frame_types) override;
+  int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
+  int SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
+  int SetRateAllocation(const BitrateAllocation& bitrate,
+                        uint32_t new_framerate) override;
+
+  VideoEncoder::ScalingSettings GetScalingSettings() const override;
+
+  int32_t SetPeriodicKeyFrames(bool enable) override;
+  bool SupportsNativeHandle() const override;
+  const char* ImplementationName() const override;
+
+ private:
+  VideoEncoderFactory* const factory_;
+  // Active encoder: the factory's default VP8 encoder, or a
+  // SimulcastEncoderAdapter after a simulcast-unsupported InitEncode.
+  std::unique_ptr<VideoEncoder> encoder_;
+  // Last registered callback; re-registered when |encoder_| is replaced.
+  EncodedImageCallback* callback_;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_ENGINE_VP8_ENCODER_SIMULCAST_PROXY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy_unittest.cc
new file mode 100644
index 0000000000..3e2761cfb6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/vp8_encoder_simulcast_proxy_unittest.cc
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "media/engine/vp8_encoder_simulcast_proxy.h"
+
+#include <string>
+
+#include "api/test/mock_video_encoder_factory.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "modules/video_coding/codecs/vp8/temporal_layers.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/video_codec_settings.h"
+
+namespace webrtc {
+namespace testing {
+
+using ::testing::Return;
+using ::testing::_;
+using ::testing::NiceMock;
+
+// gMock VideoEncoder used to script InitEncode results and observe which
+// implementation the proxy forwards to.
+class MockEncoder : public VideoEncoder {
+ public:
+  // TODO(nisse): Valid overrides commented out, because the gmock
+  // methods don't use any override declarations, and we want to avoid
+  // warnings from -Winconsistent-missing-override. See
+  // http://crbug.com/428099.
+  MockEncoder() {}
+  virtual ~MockEncoder() {}
+
+  MOCK_METHOD3(InitEncode,
+               int32_t(const VideoCodec* codec_settings,
+                       int32_t number_of_cores,
+                       size_t max_payload_size));
+
+  MOCK_METHOD1(RegisterEncodeCompleteCallback, int32_t(EncodedImageCallback*));
+
+  MOCK_METHOD0(Release, int32_t());
+
+  MOCK_METHOD3(
+      Encode,
+      int32_t(const VideoFrame& inputImage,
+              const CodecSpecificInfo* codecSpecificInfo,
+              const std::vector<FrameType>* frame_types) /* override */);
+
+  MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
+
+  MOCK_CONST_METHOD0(ImplementationName, const char*());
+};
+
+// Verifies the proxy keeps the factory's encoder when it accepts simulcast
+// settings, and swaps to a SimulcastEncoderAdapter (creating one encoder per
+// stream) when InitEncode reports simulcast-unsupported.
+TEST(VP8EncoderSimulcastProxy, ChoosesCorrectImplementation) {
+  const std::string kImplementationName = "Fake";
+  const std::string kSimulcastAdaptedImplementationName =
+      "SimulcastEncoderAdapter (Fake, Fake, Fake)";
+  // Three-stream simulcast configuration.
+  VideoCodec codec_settings;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
+  TemporalLayersFactory tl_factory;
+  codec_settings.VP8()->tl_factory = &tl_factory;
+  codec_settings.simulcastStream[0] = {
+      test::kTestWidth, test::kTestHeight, 2, 2000, 1000, 1000, 56};
+  codec_settings.simulcastStream[1] = {
+      test::kTestWidth, test::kTestHeight, 2, 3000, 1000, 1000, 56};
+  codec_settings.simulcastStream[2] = {
+      test::kTestWidth, test::kTestHeight, 2, 5000, 1000, 1000, 56};
+  codec_settings.numberOfSimulcastStreams = 3;
+
+  // Case 1: the single factory encoder supports simulcast, so only one
+  // encoder is ever created and its name is reported unchanged.
+  NiceMock<MockEncoder>* mock_encoder = new NiceMock<MockEncoder>();
+  NiceMock<MockVideoEncoderFactory> simulcast_factory;
+
+  EXPECT_CALL(*mock_encoder, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+  EXPECT_CALL(*mock_encoder, ImplementationName())
+      .WillRepeatedly(Return(kImplementationName.c_str()));
+
+  EXPECT_CALL(simulcast_factory, CreateVideoEncoderProxy(_))
+      .Times(1)
+      .WillOnce(Return(mock_encoder));
+
+  VP8EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            simulcast_enabled_proxy.InitEncode(&codec_settings, 4, 1200));
+  EXPECT_EQ(kImplementationName, simulcast_enabled_proxy.ImplementationName());
+
+  // Case 2: the first encoder rejects simulcast; the proxy falls back to a
+  // SimulcastEncoderAdapter, which creates one encoder per stream (3 more).
+  NiceMock<MockEncoder>* mock_encoder1 = new NiceMock<MockEncoder>();
+  NiceMock<MockEncoder>* mock_encoder2 = new NiceMock<MockEncoder>();
+  NiceMock<MockEncoder>* mock_encoder3 = new NiceMock<MockEncoder>();
+  NiceMock<MockEncoder>* mock_encoder4 = new NiceMock<MockEncoder>();
+  NiceMock<MockVideoEncoderFactory> nonsimulcast_factory;
+
+  EXPECT_CALL(*mock_encoder1, InitEncode(_, _, _))
+      .WillOnce(
+          Return(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED));
+  EXPECT_CALL(*mock_encoder1, ImplementationName())
+      .WillRepeatedly(Return(kImplementationName.c_str()));
+
+  EXPECT_CALL(*mock_encoder2, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+  EXPECT_CALL(*mock_encoder2, ImplementationName())
+      .WillRepeatedly(Return(kImplementationName.c_str()));
+
+  EXPECT_CALL(*mock_encoder3, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+  EXPECT_CALL(*mock_encoder3, ImplementationName())
+      .WillRepeatedly(Return(kImplementationName.c_str()));
+
+  EXPECT_CALL(*mock_encoder4, InitEncode(_, _, _))
+      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
+  EXPECT_CALL(*mock_encoder4, ImplementationName())
+      .WillRepeatedly(Return(kImplementationName.c_str()));
+
+  EXPECT_CALL(nonsimulcast_factory, CreateVideoEncoderProxy(_))
+      .Times(4)
+      .WillOnce(Return(mock_encoder1))
+      .WillOnce(Return(mock_encoder2))
+      .WillOnce(Return(mock_encoder3))
+      .WillOnce(Return(mock_encoder4));
+
+  VP8EncoderSimulcastProxy simulcast_disabled_proxy(&nonsimulcast_factory);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            simulcast_disabled_proxy.InitEncode(&codec_settings, 4, 1200));
+  EXPECT_EQ(kSimulcastAdaptedImplementationName,
+            simulcast_disabled_proxy.ImplementationName());
+
+  // Cleanup.
+  simulcast_enabled_proxy.Release();
+  simulcast_disabled_proxy.Release();
+}
+
+} // namespace testing
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.cc
new file mode 100644
index 0000000000..8d8b768170
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.cc
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcmediaengine.h"
+
+#include <algorithm>
+#include <memory>
+#include <tuple>
+#include <utility>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/engine/webrtcvoiceengine.h"
+
+#ifdef HAVE_WEBRTC_VIDEO
+#include "media/engine/webrtcvideoengine.h"
+#else
+#include "media/engine/nullwebrtcvideoengine.h"
+#endif
+
+namespace cricket {
+
+namespace {
+
+MediaEngineInterface* CreateWebRtcMediaEngine(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory,
+ WebRtcVideoEncoderFactory* video_encoder_factory,
+ WebRtcVideoDecoderFactory* video_decoder_factory,
+ rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing) {
+#ifdef HAVE_WEBRTC_VIDEO
+ typedef WebRtcVideoEngine VideoEngine;
+ std::tuple<std::unique_ptr<WebRtcVideoEncoderFactory>,
+ std::unique_ptr<WebRtcVideoDecoderFactory>>
+ video_args(
+ (std::unique_ptr<WebRtcVideoEncoderFactory>(video_encoder_factory)),
+ (std::unique_ptr<WebRtcVideoDecoderFactory>(video_decoder_factory)));
+#else
+ typedef NullWebRtcVideoEngine VideoEngine;
+ std::tuple<> video_args;
+#endif
+ return new CompositeMediaEngine<WebRtcVoiceEngine, VideoEngine>(
+ std::forward_as_tuple(adm, audio_encoder_factory, audio_decoder_factory,
+ audio_mixer, audio_processing),
+ std::move(video_args));
+}
+
+} // namespace
+
+MediaEngineInterface* WebRtcMediaEngineFactory::Create(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory,
+ WebRtcVideoEncoderFactory* video_encoder_factory,
+ WebRtcVideoDecoderFactory* video_decoder_factory) {
+ return CreateWebRtcMediaEngine(
+ adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
+ video_decoder_factory, nullptr, webrtc::AudioProcessing::Create());
+}
+
+MediaEngineInterface* WebRtcMediaEngineFactory::Create(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory,
+ WebRtcVideoEncoderFactory* video_encoder_factory,
+ WebRtcVideoDecoderFactory* video_decoder_factory,
+ rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing) {
+ return CreateWebRtcMediaEngine(
+ adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
+ video_decoder_factory, audio_mixer, audio_processing);
+}
+
+std::unique_ptr<MediaEngineInterface> WebRtcMediaEngineFactory::Create(
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> adm,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
+ std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
+ std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory,
+ rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing) {
+#ifdef HAVE_WEBRTC_VIDEO
+ typedef WebRtcVideoEngine VideoEngine;
+ std::tuple<std::unique_ptr<webrtc::VideoEncoderFactory>,
+ std::unique_ptr<webrtc::VideoDecoderFactory>>
+ video_args(std::move(video_encoder_factory),
+ std::move(video_decoder_factory));
+#else
+ typedef NullWebRtcVideoEngine VideoEngine;
+ std::tuple<> video_args;
+#endif
+ return std::unique_ptr<MediaEngineInterface>(
+ new CompositeMediaEngine<WebRtcVoiceEngine, VideoEngine>(
+ std::forward_as_tuple(adm, audio_encoder_factory,
+ audio_decoder_factory, audio_mixer,
+ audio_processing),
+ std::move(video_args)));
+}
+
+namespace {
+// Remove mutually exclusive extensions with lower priority.
+void DiscardRedundantExtensions(
+ std::vector<webrtc::RtpExtension>* extensions,
+ rtc::ArrayView<const char* const> extensions_decreasing_prio) {
+ RTC_DCHECK(extensions);
+ bool found = false;
+ for (const char* uri : extensions_decreasing_prio) {
+ auto it = std::find_if(
+ extensions->begin(), extensions->end(),
+ [uri](const webrtc::RtpExtension& rhs) { return rhs.uri == uri; });
+ if (it != extensions->end()) {
+ if (found) {
+ extensions->erase(it);
+ }
+ found = true;
+ }
+ }
+}
+} // namespace
+
+bool ValidateRtpExtensions(
+ const std::vector<webrtc::RtpExtension>& extensions) {
+ bool id_used[14] = {false};
+ for (const auto& extension : extensions) {
+ if (extension.id <= 0 || extension.id >= 15) {
+ RTC_LOG(LS_ERROR) << "Bad RTP extension ID: " << extension.ToString();
+ return false;
+ }
+ if (id_used[extension.id - 1]) {
+ RTC_LOG(LS_ERROR) << "Duplicate RTP extension ID: "
+ << extension.ToString();
+ return false;
+ }
+ id_used[extension.id - 1] = true;
+ }
+ return true;
+}
+
+std::vector<webrtc::RtpExtension> FilterRtpExtensions(
+ const std::vector<webrtc::RtpExtension>& extensions,
+ bool (*supported)(const std::string&),
+ bool filter_redundant_extensions) {
+ RTC_DCHECK(ValidateRtpExtensions(extensions));
+ RTC_DCHECK(supported);
+ std::vector<webrtc::RtpExtension> result;
+
+ // Ignore any extensions that we don't recognize.
+ for (const auto& extension : extensions) {
+ if (supported(extension.uri)) {
+ result.push_back(extension);
+ } else {
+ RTC_LOG(LS_WARNING) << "Unsupported RTP extension: "
+ << extension.ToString();
+ }
+ }
+
+ // Sort by name, ascending (prioritise encryption), so that we don't reset
+ // extensions if they were specified in a different order (also allows us
+ // to use std::unique below).
+ std::sort(result.begin(), result.end(),
+ [](const webrtc::RtpExtension& rhs,
+ const webrtc::RtpExtension& lhs) {
+ return rhs.encrypt == lhs.encrypt ? rhs.uri < lhs.uri
+ : rhs.encrypt > lhs.encrypt;
+ });
+
+ // Remove unnecessary extensions (used on send side).
+ if (filter_redundant_extensions) {
+ auto it = std::unique(
+ result.begin(), result.end(),
+ [](const webrtc::RtpExtension& rhs, const webrtc::RtpExtension& lhs) {
+ return rhs.uri == lhs.uri && rhs.encrypt == lhs.encrypt;
+ });
+ result.erase(it, result.end());
+
+ // Keep just the highest priority extension of any in the following list.
+ static const char* const kBweExtensionPriorities[] = {
+ webrtc::RtpExtension::kTransportSequenceNumberUri,
+ webrtc::RtpExtension::kAbsSendTimeUri,
+ webrtc::RtpExtension::kTimestampOffsetUri};
+ DiscardRedundantExtensions(&result, kBweExtensionPriorities);
+ }
+
+ return result;
+}
+
+webrtc::Call::Config::BitrateConfig GetBitrateConfigForCodec(
+ const Codec& codec) {
+ webrtc::Call::Config::BitrateConfig config;
+ int bitrate_kbps = 0;
+ if (codec.GetParam(kCodecParamMinBitrate, &bitrate_kbps) &&
+ bitrate_kbps > 0) {
+ config.min_bitrate_bps = bitrate_kbps * 1000;
+ } else {
+ config.min_bitrate_bps = 0;
+ }
+ if (codec.GetParam(kCodecParamStartBitrate, &bitrate_kbps) &&
+ bitrate_kbps > 0) {
+ config.start_bitrate_bps = bitrate_kbps * 1000;
+ } else {
+ // Do not reconfigure start bitrate unless it's specified and positive.
+ config.start_bitrate_bps = -1;
+ }
+ if (codec.GetParam(kCodecParamMaxBitrate, &bitrate_kbps) &&
+ bitrate_kbps > 0) {
+ config.max_bitrate_bps = bitrate_kbps * 1000;
+ } else {
+ config.max_bitrate_bps = -1;
+ }
+ return config;
+}
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.h b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.h
new file mode 100644
index 0000000000..1b1649aec6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCMEDIAENGINE_H_
+#define MEDIA_ENGINE_WEBRTCMEDIAENGINE_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "call/call.h"
+#include "media/base/mediaengine.h"
+
+namespace webrtc {
+class AudioDecoderFactory;
+class AudioDeviceModule;
+class AudioMixer;
+class AudioProcessing;
+class VideoDecoderFactory;
+class VideoEncoderFactory;
+}
+namespace cricket {
+class WebRtcVideoDecoderFactory;
+class WebRtcVideoEncoderFactory;
+}
+
+namespace cricket {
+
+class WebRtcMediaEngineFactory {
+ public:
+ // These Create methods may be called on any thread, though the engine is
+ // only expected to be used on one thread, internally called the "worker
+ // thread". This is the thread Init must be called on.
+ //
+ // TODO(deadbeef): Change these to return an std::unique_ptr<>, to indicate
+ // that the caller owns the returned object.
+ static MediaEngineInterface* Create(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory,
+ WebRtcVideoEncoderFactory* video_encoder_factory,
+ WebRtcVideoDecoderFactory* video_decoder_factory);
+ static MediaEngineInterface* Create(
+ webrtc::AudioDeviceModule* adm,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+ audio_encoder_factory,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+ audio_decoder_factory,
+ WebRtcVideoEncoderFactory* video_encoder_factory,
+ WebRtcVideoDecoderFactory* video_decoder_factory,
+ rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm);
+
+ // Create a MediaEngineInterface with optional video codec factories. These
+// video factories represent all video codecs, i.e. no extra internal video
+ // codecs will be added.
+ static std::unique_ptr<MediaEngineInterface> Create(
+ rtc::scoped_refptr<webrtc::AudioDeviceModule> adm,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
+ std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
+ std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory,
+ rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing);
+};
+
+// Verify that extension IDs are within 1-byte extension range and are not
+// overlapping.
+bool ValidateRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions);
+
+// Discard any extensions not validated by the 'supported' predicate. Duplicate
+// extensions are removed if 'filter_redundant_extensions' is set, and also any
+// mutually exclusive extensions (see implementation for details) are removed.
+std::vector<webrtc::RtpExtension> FilterRtpExtensions(
+ const std::vector<webrtc::RtpExtension>& extensions,
+ bool (*supported)(const std::string&),
+ bool filter_redundant_extensions);
+
+webrtc::Call::Config::BitrateConfig GetBitrateConfigForCodec(
+ const Codec& codec);
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCMEDIAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine_unittest.cc
new file mode 100644
index 0000000000..85170e24e5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcmediaengine_unittest.cc
@@ -0,0 +1,246 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "media/engine/webrtcmediaengine.h"
+#include "test/gtest.h"
+
+using webrtc::RtpExtension;
+
+namespace cricket {
+namespace {
+
+std::vector<RtpExtension> MakeUniqueExtensions() {
+ std::vector<RtpExtension> result;
+ char name[] = "a";
+ for (int i = 0; i < 7; ++i) {
+ result.push_back(RtpExtension(name, 1 + i));
+ name[0]++;
+ result.push_back(RtpExtension(name, 14 - i));
+ name[0]++;
+ }
+ return result;
+}
+
+std::vector<RtpExtension> MakeRedundantExtensions() {
+ std::vector<RtpExtension> result;
+ char name[] = "a";
+ for (int i = 0; i < 7; ++i) {
+ result.push_back(RtpExtension(name, 1 + i));
+ result.push_back(RtpExtension(name, 14 - i));
+ name[0]++;
+ }
+ return result;
+}
+
+bool SupportedExtensions1(const std::string& name) {
+ return name == "c" || name == "i";
+}
+
+bool SupportedExtensions2(const std::string& name) {
+ return name != "a" && name != "n";
+}
+
+bool IsSorted(const std::vector<webrtc::RtpExtension>& extensions) {
+ const std::string* last = nullptr;
+ for (const auto& extension : extensions) {
+ if (last && *last > extension.uri) {
+ return false;
+ }
+ last = &extension.uri;
+ }
+ return true;
+}
+} // namespace
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_EmptyList) {
+ std::vector<RtpExtension> extensions;
+ EXPECT_TRUE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_AllGood) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ EXPECT_TRUE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OutOfRangeId_Low) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpExtension("foo", 0));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OutOfRangeId_High) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpExtension("foo", 15));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OverlappingIds_StartOfSet) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpExtension("foo", 1));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, ValidateRtpExtensions_OverlappingIds_EndOfSet) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ extensions.push_back(RtpExtension("foo", 14));
+ EXPECT_FALSE(ValidateRtpExtensions(extensions));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_EmptyList) {
+ std::vector<RtpExtension> extensions;
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions1, true);
+ EXPECT_EQ(0, filtered.size());
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_IncludeOnlySupported) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions1, false);
+ EXPECT_EQ(2, filtered.size());
+ EXPECT_EQ("c", filtered[0].uri);
+ EXPECT_EQ("i", filtered[1].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_1) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, false);
+ EXPECT_EQ(12, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_SortedByName_2) {
+ std::vector<RtpExtension> extensions = MakeUniqueExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(12, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_DontRemoveRedundant) {
+ std::vector<RtpExtension> extensions = MakeRedundantExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, false);
+ EXPECT_EQ(12, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_EQ(filtered[0].uri, filtered[1].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundant) {
+ std::vector<RtpExtension> extensions = MakeRedundantExtensions();
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(6, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_NE(filtered[0].uri, filtered[1].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantEncrypted_1) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(webrtc::RtpExtension("b", 1));
+ extensions.push_back(webrtc::RtpExtension("b", 2, true));
+ extensions.push_back(webrtc::RtpExtension("c", 3));
+ extensions.push_back(webrtc::RtpExtension("b", 4));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(3, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_EQ(filtered[0].uri, filtered[1].uri);
+ EXPECT_NE(filtered[0].encrypt, filtered[1].encrypt);
+ EXPECT_NE(filtered[0].uri, filtered[2].uri);
+ EXPECT_NE(filtered[1].uri, filtered[2].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantEncrypted_2) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(webrtc::RtpExtension("b", 1, true));
+ extensions.push_back(webrtc::RtpExtension("b", 2));
+ extensions.push_back(webrtc::RtpExtension("c", 3));
+ extensions.push_back(webrtc::RtpExtension("b", 4));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(3, filtered.size());
+ EXPECT_TRUE(IsSorted(filtered));
+ EXPECT_EQ(filtered[0].uri, filtered[1].uri);
+ EXPECT_NE(filtered[0].encrypt, filtered[1].encrypt);
+ EXPECT_NE(filtered[0].uri, filtered[2].uri);
+ EXPECT_NE(filtered[1].uri, filtered[2].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_1) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 9));
+ extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 6));
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1, filtered.size());
+ EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBweEncrypted_1) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3));
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 4, true));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 9));
+ extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 6));
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1));
+ extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 2, true));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(2, filtered.size());
+ EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri);
+ EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[1].uri);
+ EXPECT_NE(filtered[0].encrypt, filtered[1].encrypt);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_2) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 1));
+ extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 14));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 7));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1, filtered.size());
+ EXPECT_EQ(RtpExtension::kAbsSendTimeUri, filtered[0].uri);
+}
+
+TEST(WebRtcMediaEngineTest, FilterRtpExtensions_RemoveRedundantBwe_3) {
+ std::vector<RtpExtension> extensions;
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 2));
+ extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14));
+ std::vector<webrtc::RtpExtension> filtered =
+ FilterRtpExtensions(extensions, SupportedExtensions2, true);
+ EXPECT_EQ(1, filtered.size());
+ EXPECT_EQ(RtpExtension::kTimestampOffsetUri, filtered[0].uri);
+}
+
+TEST(WebRtcMediaEngineFactoryTest, CreateWithBuiltinDecoders) {
+ std::unique_ptr<MediaEngineInterface> engine(WebRtcMediaEngineFactory::Create(
+ nullptr, webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, nullptr));
+ EXPECT_TRUE(engine);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.cc
new file mode 100644
index 0000000000..cf9eab6e6c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.cc
@@ -0,0 +1,342 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcvideocapturer.h"
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/bind.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/timeutils.h"
+
+#include "modules/video_capture/video_capture_factory.h"
+#include "rtc_base/win32.h" // Need this to #include the impl files.
+#include "system_wrappers/include/field_trial.h"
+
+namespace cricket {
+
+namespace {
+struct kVideoFourCCEntry {
+ uint32_t fourcc;
+ webrtc::VideoType webrtc_type;
+};
+
+// This indicates our format preferences and defines a mapping from
+// webrtc::VideoType (from video_capture_defines.h) to our FOURCCs.
+kVideoFourCCEntry kSupportedFourCCs[] = {
+ {FOURCC_I420, webrtc::VideoType::kI420}, // 12 bpp, no conversion.
+ {FOURCC_YV12, webrtc::VideoType::kYV12}, // 12 bpp, no conversion.
+ {FOURCC_YUY2, webrtc::VideoType::kYUY2}, // 16 bpp, fast conversion.
+ {FOURCC_UYVY, webrtc::VideoType::kUYVY}, // 16 bpp, fast conversion.
+ {FOURCC_NV12, webrtc::VideoType::kNV12}, // 12 bpp, fast conversion.
+ {FOURCC_NV21, webrtc::VideoType::kNV21}, // 12 bpp, fast conversion.
+ {FOURCC_MJPG, webrtc::VideoType::kMJPEG}, // compressed, slow conversion.
+ {FOURCC_ARGB, webrtc::VideoType::kARGB}, // 32 bpp, slow conversion.
+ {FOURCC_24BG, webrtc::VideoType::kRGB24}, // 24 bpp, slow conversion.
+};
+
+bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
+ VideoFormat* format) {
+ uint32_t fourcc = 0;
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
+ if (kSupportedFourCCs[i].webrtc_type == cap.videoType) {
+ fourcc = kSupportedFourCCs[i].fourcc;
+ break;
+ }
+ }
+ if (fourcc == 0) {
+ return false;
+ }
+
+ format->fourcc = fourcc;
+ format->width = cap.width;
+ format->height = cap.height;
+ format->interval = VideoFormat::FpsToInterval(cap.maxFPS);
+ return true;
+}
+
+bool FormatToCapability(const VideoFormat& format,
+ webrtc::VideoCaptureCapability* cap) {
+ webrtc::VideoType webrtc_type = webrtc::VideoType::kUnknown;
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
+ if (kSupportedFourCCs[i].fourcc == format.fourcc) {
+ webrtc_type = kSupportedFourCCs[i].webrtc_type;
+ break;
+ }
+ }
+ if (webrtc_type == webrtc::VideoType::kUnknown) {
+ return false;
+ }
+
+ cap->width = format.width;
+ cap->height = format.height;
+ cap->maxFPS = VideoFormat::IntervalToFps(format.interval);
+ cap->videoType = webrtc_type;
+ cap->interlaced = false;
+ return true;
+}
+
+} // namespace
+
+class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
+ public:
+ virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
+ const char* device) {
+ return webrtc::VideoCaptureFactory::Create(device);
+ }
+ virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
+ return webrtc::VideoCaptureFactory::CreateDeviceInfo();
+ }
+ virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
+ delete info;
+ }
+};
+
+///////////////////////////////////////////////////////////////////////////
+// Implementation of class WebRtcVideoCapturer
+///////////////////////////////////////////////////////////////////////////
+
+WebRtcVideoCapturer::WebRtcVideoCapturer()
+ : factory_(new WebRtcVcmFactory),
+ module_(nullptr),
+ captured_frames_(0),
+ start_thread_(nullptr) {}
+
+WebRtcVideoCapturer::WebRtcVideoCapturer(WebRtcVcmFactoryInterface* factory)
+ : factory_(factory),
+ module_(nullptr),
+ captured_frames_(0),
+ start_thread_(nullptr) {}
+
+WebRtcVideoCapturer::~WebRtcVideoCapturer() {}
+
+bool WebRtcVideoCapturer::Init(const Device& device) {
+ RTC_DCHECK(!start_thread_);
+ if (module_) {
+ RTC_LOG(LS_ERROR) << "The capturer is already initialized";
+ return false;
+ }
+
+ webrtc::VideoCaptureModule::DeviceInfo* info = factory_->CreateDeviceInfo();
+ if (!info) {
+ return false;
+ }
+
+ // Find the desired camera, by name.
+ // In the future, comparing IDs will be more robust.
+  // TODO(juberti): Figure out what's needed to allow this.
+ int num_cams = info->NumberOfDevices();
+ char vcm_id[256] = "";
+ bool found = false;
+ for (int index = 0; index < num_cams; ++index) {
+ char vcm_name[256];
+ if (info->GetDeviceName(index, vcm_name, arraysize(vcm_name), vcm_id,
+ arraysize(vcm_id)) != -1) {
+ if (device.name == reinterpret_cast<char*>(vcm_name)) {
+ found = true;
+ break;
+ }
+ }
+ }
+ if (!found) {
+ RTC_LOG(LS_WARNING) << "Failed to find capturer for id: " << device.id;
+ factory_->DestroyDeviceInfo(info);
+ return false;
+ }
+
+ // Enumerate the supported formats.
+ // TODO(juberti): Find out why this starts/stops the camera...
+ std::vector<VideoFormat> supported;
+ int32_t num_caps = info->NumberOfCapabilities(vcm_id);
+ for (int32_t i = 0; i < num_caps; ++i) {
+ webrtc::VideoCaptureCapability cap;
+ if (info->GetCapability(vcm_id, i, cap) != -1) {
+ VideoFormat format;
+ if (CapabilityToFormat(cap, &format)) {
+ supported.push_back(format);
+ } else {
+ RTC_LOG(LS_WARNING) << "Ignoring unsupported WebRTC capture format "
+ << static_cast<int>(cap.videoType);
+ }
+ }
+ }
+ factory_->DestroyDeviceInfo(info);
+
+ if (supported.empty()) {
+ RTC_LOG(LS_ERROR) << "Failed to find usable formats for id: " << device.id;
+ return false;
+ }
+
+ module_ = factory_->Create(vcm_id);
+ if (!module_) {
+ RTC_LOG(LS_ERROR) << "Failed to create capturer for id: " << device.id;
+ return false;
+ }
+
+ // It is safe to change member attributes now.
+ SetId(device.id);
+ SetSupportedFormats(supported);
+
+ return true;
+}
+
+bool WebRtcVideoCapturer::Init(
+ const rtc::scoped_refptr<webrtc::VideoCaptureModule>& module) {
+ RTC_DCHECK(!start_thread_);
+ if (module_) {
+ RTC_LOG(LS_ERROR) << "The capturer is already initialized";
+ return false;
+ }
+ if (!module) {
+ RTC_LOG(LS_ERROR) << "Invalid VCM supplied";
+ return false;
+ }
+ // TODO(juberti): Set id and formats.
+ module_ = module;
+ return true;
+}
+
+bool WebRtcVideoCapturer::GetBestCaptureFormat(const VideoFormat& desired,
+ VideoFormat* best_format) {
+ if (!best_format) {
+ return false;
+ }
+
+ if (!VideoCapturer::GetBestCaptureFormat(desired, best_format)) {
+    // We may be using a manually injected VCM which doesn't support enum.
+ // Use the desired format as the best format.
+ best_format->width = desired.width;
+ best_format->height = desired.height;
+ best_format->fourcc = FOURCC_I420;
+ best_format->interval = desired.interval;
+ RTC_LOG(LS_INFO) << "Failed to find best capture format,"
+ << " fall back to the requested format "
+ << best_format->ToString();
+ }
+ return true;
+}
+void WebRtcVideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
+ // Can't take lock here as this will cause deadlock with
+ // OnIncomingCapturedFrame. In fact, the whole method, including methods it
+ // calls, can't take lock.
+ RTC_DCHECK(module_);
+
+ if (webrtc::field_trial::FindFullName("WebRTC-CVO").find("Disabled") == 0)
+ return;
+
+ VideoCapturer::OnSinkWantsChanged(wants);
+ bool result = module_->SetApplyRotation(wants.rotation_applied);
+ RTC_CHECK(result);
+
+ return;
+}
+
+CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
+ if (!module_) {
+ RTC_LOG(LS_ERROR) << "The capturer has not been initialized";
+ return CS_FAILED;
+ }
+ if (start_thread_) {
+ RTC_LOG(LS_ERROR) << "The capturer is already running";
+ RTC_DCHECK(start_thread_->IsCurrent())
+ << "Trying to start capturer on different threads";
+ return CS_FAILED;
+ }
+
+ start_thread_ = rtc::Thread::Current();
+ captured_frames_ = 0;
+
+ SetCaptureFormat(&capture_format);
+
+ webrtc::VideoCaptureCapability cap;
+ if (!FormatToCapability(capture_format, &cap)) {
+ RTC_LOG(LS_ERROR) << "Invalid capture format specified";
+ return CS_FAILED;
+ }
+
+ int64_t start = rtc::TimeMillis();
+ module_->RegisterCaptureDataCallback(this);
+ if (module_->StartCapture(cap) != 0) {
+ RTC_LOG(LS_ERROR) << "Camera '" << GetId() << "' failed to start";
+ module_->DeRegisterCaptureDataCallback();
+ SetCaptureFormat(nullptr);
+ start_thread_ = nullptr;
+ return CS_FAILED;
+ }
+
+ RTC_LOG(LS_INFO) << "Camera '" << GetId() << "' started with format "
+ << capture_format.ToString() << ", elapsed time "
+ << rtc::TimeSince(start) << " ms";
+
+ SetCaptureState(CS_RUNNING);
+ return CS_STARTING;
+}
+
+void WebRtcVideoCapturer::Stop() {
+ if (!start_thread_) {
+ RTC_LOG(LS_ERROR) << "The capturer is already stopped";
+ return;
+ }
+ RTC_DCHECK(start_thread_);
+ RTC_DCHECK(start_thread_->IsCurrent());
+ if (IsRunning()) {
+ // The module is responsible for OnIncomingCapturedFrame being called, if
+ // we stop it we will get no further callbacks.
+ module_->StopCapture();
+ }
+ module_->DeRegisterCaptureDataCallback();
+
+ // TODO(juberti): Determine if the VCM exposes any drop stats we can use.
+ double drop_ratio = 0.0;
+ RTC_LOG(LS_INFO) << "Camera '" << GetId() << "' stopped after capturing "
+ << captured_frames_ << " frames and dropping " << drop_ratio
+ << "%";
+
+ SetCaptureFormat(NULL);
+ start_thread_ = nullptr;
+ SetCaptureState(CS_STOPPED);
+}
+
+bool WebRtcVideoCapturer::IsRunning() {
+ return (module_ != NULL && module_->CaptureStarted());
+}
+
+bool WebRtcVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+ if (!fourccs) {
+ return false;
+ }
+
+ fourccs->clear();
+ for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
+ fourccs->push_back(kSupportedFourCCs[i].fourcc);
+ }
+ return true;
+}
+
+void WebRtcVideoCapturer::OnFrame(
+ const webrtc::VideoFrame& sample) {
+ // This can only happen between Start() and Stop().
+ RTC_DCHECK(start_thread_);
+
+ ++captured_frames_;
+ // Log the size and pixel aspect ratio of the first captured frame.
+ if (1 == captured_frames_) {
+ RTC_LOG(LS_INFO) << "Captured frame size " << sample.width() << "x"
+ << sample.height() << ". Expected format "
+ << GetCaptureFormat()->ToString();
+ }
+
+ VideoCapturer::OnFrame(sample, sample.width(), sample.height());
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.h
new file mode 100644
index 0000000000..a7c49fb14c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
+#define MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/device.h"
+#include "media/base/videocapturer.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/asyncinvoker.h"
+#include "rtc_base/messagehandler.h"
+#include "rtc_base/scoped_ref_ptr.h"
+
+namespace cricket {
+
+// Factory to allow injection of a VCM impl into WebRtcVideoCapturer.
+// DeviceInfos do not have a Release() and therefore need an explicit Destroy().
+class WebRtcVcmFactoryInterface {
+ public:
+ virtual ~WebRtcVcmFactoryInterface() {}
+ virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
+ const char* device) = 0;
+ virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() = 0;
+ virtual void DestroyDeviceInfo(
+ webrtc::VideoCaptureModule::DeviceInfo* info) = 0;
+};
+
+// WebRTC-based implementation of VideoCapturer.
+class WebRtcVideoCapturer : public VideoCapturer,
+ public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ WebRtcVideoCapturer();
+ explicit WebRtcVideoCapturer(WebRtcVcmFactoryInterface* factory);
+ virtual ~WebRtcVideoCapturer();
+
+ bool Init(const Device& device);
+ bool Init(const rtc::scoped_refptr<webrtc::VideoCaptureModule>& module);
+
+ // Override virtual methods of the parent class VideoCapturer.
+ bool GetBestCaptureFormat(const VideoFormat& desired,
+ VideoFormat* best_format) override;
+ CaptureState Start(const VideoFormat& capture_format) override;
+ void Stop() override;
+ bool IsRunning() override;
+ bool IsScreencast() const override { return false; }
+
+ protected:
+ void OnSinkWantsChanged(const rtc::VideoSinkWants& wants) override;
+ // Override virtual methods of the parent class VideoCapturer.
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+
+ private:
+ // Callback when a frame is captured by camera.
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ // Used to signal captured frames on the same thread as invoked Start().
+ // With WebRTC's current VideoCapturer implementations, this will mean a
+ // thread hop, but in other implementations (e.g. Chrome) it will be called
+ // directly from OnIncomingCapturedFrame.
+ // TODO(tommi): Remove this workaround when we've updated the WebRTC capturers
+ // to follow the same contract.
+ void SignalFrameCapturedOnStartThread(const webrtc::VideoFrame& frame);
+
+ std::unique_ptr<WebRtcVcmFactoryInterface> factory_;
+ rtc::scoped_refptr<webrtc::VideoCaptureModule> module_;
+ int captured_frames_;
+ std::vector<uint8_t> capture_buffer_;
+  rtc::Thread* start_thread_;  // Set in Start(), unset in Stop().
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer_unittest.cc
new file mode 100644
index 0000000000..8770960c4c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturer_unittest.cc
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_WEBRTC_VIDEO
+
+#include <stdio.h>
+
+#include <memory>
+#include <vector>
+
+#include "media/base/testutils.h"
+#include "media/base/videocommon.h"
+#include "media/engine/fakewebrtcvcmfactory.h"
+#include "media/engine/webrtcvideocapturer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/thread.h"
+
+using cricket::VideoFormat;
+
+static const char kTestDeviceName[] = "JuberTech FakeCam Q123";
+static const char kTestDeviceId[] = "foo://bar/baz";
+const VideoFormat kDefaultVideoFormat =
+ VideoFormat(640, 400, VideoFormat::FpsToInterval(30), cricket::FOURCC_ANY);
+
+class WebRtcVideoCapturerTest : public testing::Test {
+ public:
+ WebRtcVideoCapturerTest()
+ : factory_(new FakeWebRtcVcmFactory),
+ capturer_(new cricket::WebRtcVideoCapturer(factory_)) {
+ factory_->device_info.AddDevice(kTestDeviceName, kTestDeviceId);
+ // add a VGA/I420 capability
+ webrtc::VideoCaptureCapability vga;
+ vga.width = 640;
+ vga.height = 480;
+ vga.maxFPS = 30;
+ vga.videoType = webrtc::VideoType::kI420;
+ factory_->device_info.AddCapability(kTestDeviceId, vga);
+ }
+
+ protected:
+ FakeWebRtcVcmFactory* factory_; // owned by capturer_
+ std::unique_ptr<cricket::WebRtcVideoCapturer> capturer_;
+};
+
+TEST_F(WebRtcVideoCapturerTest, TestNotOpened) {
+ EXPECT_EQ("", capturer_->GetId());
+ EXPECT_TRUE(capturer_->GetSupportedFormats()->empty());
+ EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
+ EXPECT_FALSE(capturer_->IsRunning());
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestBadInit) {
+ EXPECT_FALSE(capturer_->Init(cricket::Device("bad-name", "bad-id")));
+ EXPECT_FALSE(capturer_->IsRunning());
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestInit) {
+ EXPECT_TRUE(capturer_->Init(cricket::Device(kTestDeviceName, kTestDeviceId)));
+ EXPECT_EQ(kTestDeviceId, capturer_->GetId());
+ EXPECT_TRUE(NULL != capturer_->GetSupportedFormats());
+ ASSERT_EQ(1U, capturer_->GetSupportedFormats()->size());
+ EXPECT_EQ(640, (*capturer_->GetSupportedFormats())[0].width);
+ EXPECT_EQ(480, (*capturer_->GetSupportedFormats())[0].height);
+ EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL); // not started yet
+ EXPECT_FALSE(capturer_->IsRunning());
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestInitVcm) {
+ EXPECT_TRUE(capturer_->Init(factory_->Create(kTestDeviceId)));
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestCapture) {
+ EXPECT_TRUE(capturer_->Init(cricket::Device(kTestDeviceName, kTestDeviceId)));
+ cricket::VideoCapturerListener listener(capturer_.get());
+ cricket::VideoFormat format(
+ capturer_->GetSupportedFormats()->at(0));
+ EXPECT_EQ(cricket::CS_STARTING, capturer_->Start(format));
+ EXPECT_TRUE(capturer_->IsRunning());
+ ASSERT_TRUE(capturer_->GetCaptureFormat() != NULL);
+ EXPECT_EQ(format, *capturer_->GetCaptureFormat());
+ EXPECT_EQ_WAIT(cricket::CS_RUNNING, listener.last_capture_state(), 1000);
+ factory_->modules[0]->SendFrame(640, 480);
+ EXPECT_TRUE_WAIT(listener.frame_count() > 0, 5000);
+ EXPECT_EQ(640, listener.frame_width());
+ EXPECT_EQ(480, listener.frame_height());
+ EXPECT_EQ(cricket::CS_FAILED, capturer_->Start(format));
+ capturer_->Stop();
+ EXPECT_FALSE(capturer_->IsRunning());
+ EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
+ EXPECT_EQ_WAIT(cricket::CS_STOPPED, listener.last_capture_state(), 1000);
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestCaptureVcm) {
+ EXPECT_TRUE(capturer_->Init(factory_->Create(kTestDeviceId)));
+ cricket::VideoCapturerListener listener(capturer_.get());
+ EXPECT_TRUE(capturer_->GetSupportedFormats()->empty());
+ VideoFormat format;
+ EXPECT_TRUE(capturer_->GetBestCaptureFormat(kDefaultVideoFormat, &format));
+ EXPECT_EQ(kDefaultVideoFormat.width, format.width);
+ EXPECT_EQ(kDefaultVideoFormat.height, format.height);
+ EXPECT_EQ(kDefaultVideoFormat.interval, format.interval);
+ EXPECT_EQ(cricket::FOURCC_I420, format.fourcc);
+ EXPECT_EQ(cricket::CS_STARTING, capturer_->Start(format));
+ EXPECT_TRUE(capturer_->IsRunning());
+ ASSERT_TRUE(capturer_->GetCaptureFormat() != NULL);
+ EXPECT_EQ(format, *capturer_->GetCaptureFormat());
+ EXPECT_EQ_WAIT(cricket::CS_RUNNING, listener.last_capture_state(), 1000);
+ factory_->modules[0]->SendFrame(640, 480);
+ EXPECT_TRUE_WAIT(listener.frame_count() > 0, 5000);
+ EXPECT_EQ(640, listener.frame_width());
+ EXPECT_EQ(480, listener.frame_height());
+ EXPECT_EQ(cricket::CS_FAILED, capturer_->Start(format));
+ capturer_->Stop();
+ EXPECT_FALSE(capturer_->IsRunning());
+ EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
+}
+
+TEST_F(WebRtcVideoCapturerTest, TestCaptureWithoutInit) {
+ cricket::VideoFormat format;
+ EXPECT_EQ(cricket::CS_FAILED, capturer_->Start(format));
+ EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
+ EXPECT_FALSE(capturer_->IsRunning());
+}
+
+#endif // HAVE_WEBRTC_VIDEO
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.cc
new file mode 100644
index 0000000000..c948101ea8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <utility>
+
+#include "media/engine/webrtcvideocapturer.h"
+#include "media/engine/webrtcvideocapturerfactory.h"
+
+namespace cricket {
+
+std::unique_ptr<VideoCapturer> WebRtcVideoDeviceCapturerFactory::Create(
+ const Device& device) {
+#ifdef HAVE_WEBRTC_VIDEO
+ std::unique_ptr<WebRtcVideoCapturer> capturer(
+ new WebRtcVideoCapturer());
+ if (!capturer->Init(device)) {
+ return std::unique_ptr<VideoCapturer>();
+ }
+ return std::move(capturer);
+#else
+ return std::unique_ptr<VideoCapturer>();
+#endif
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.h
new file mode 100644
index 0000000000..4f67a52237
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideocapturerfactory.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(pthatcher): Rename file to match class name.
+#ifndef MEDIA_ENGINE_WEBRTCVIDEOCAPTURERFACTORY_H_
+#define MEDIA_ENGINE_WEBRTCVIDEOCAPTURERFACTORY_H_
+
+#include <memory>
+
+#include "media/base/videocapturerfactory.h"
+
+namespace cricket {
+
+// Creates instances of cricket::WebRtcVideoCapturer.
+class WebRtcVideoDeviceCapturerFactory : public VideoDeviceCapturerFactory {
+ public:
+ std::unique_ptr<VideoCapturer> Create(const Device& device) override;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVIDEOCAPTURERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.cc
new file mode 100644
index 0000000000..5eee788d8f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcvideodecoderfactory.h"
+
+namespace cricket {
+
+webrtc::VideoDecoder* WebRtcVideoDecoderFactory::CreateVideoDecoderWithParams(
+ const VideoCodec& codec,
+ VideoDecoderParams params) {
+  // Default implementation that delegates to the old version in order to
+  // preserve backwards-compatibility.
+ webrtc::VideoCodecType type = webrtc::PayloadStringToCodecType(codec.name);
+ return CreateVideoDecoderWithParams(type, params);
+}
+
+webrtc::VideoDecoder* WebRtcVideoDecoderFactory::CreateVideoDecoder(
+ webrtc::VideoCodecType type) {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+webrtc::VideoDecoder* WebRtcVideoDecoderFactory::CreateVideoDecoderWithParams(
+ webrtc::VideoCodecType type,
+ VideoDecoderParams params) {
+ return CreateVideoDecoder(type);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.h
new file mode 100644
index 0000000000..016cb09da0
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVIDEODECODERFACTORY_H_
+#define MEDIA_ENGINE_WEBRTCVIDEODECODERFACTORY_H_
+
+#include <string>
+
+#include "common_types.h" // NOLINT(build/include)
+#include "media/base/codec.h"
+#include "rtc_base/refcount.h"
+
+namespace webrtc {
+class VideoDecoder;
+}
+
+namespace cricket {
+
+struct VideoDecoderParams {
+ std::string receive_stream_id;
+};
+
+// Deprecated. Use webrtc::VideoDecoderFactory instead.
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=7925
+class WebRtcVideoDecoderFactory {
+ public:
+  // Caller takes ownership of the returned object, which should be released
+  // by calling DestroyVideoDecoder().
+ virtual webrtc::VideoDecoder* CreateVideoDecoderWithParams(
+ const VideoCodec& codec,
+ VideoDecoderParams params);
+
+ // DEPRECATED.
+ // These methods should not be used by new code and will eventually be
+ // removed. See http://crbug.com/webrtc/8140.
+ virtual webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type);
+
+ virtual webrtc::VideoDecoder* CreateVideoDecoderWithParams(
+ webrtc::VideoCodecType type,
+ VideoDecoderParams params);
+
+ virtual ~WebRtcVideoDecoderFactory() {}
+
+ virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) = 0;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVIDEODECODERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.cc
new file mode 100644
index 0000000000..815613ec13
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcvideoencoderfactory.h"
+
+namespace cricket {
+
+bool WebRtcVideoEncoderFactory::EncoderTypeHasInternalSource(
+ webrtc::VideoCodecType type) const {
+ return false;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.h
new file mode 100644
index 0000000000..97ac03b473
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVIDEOENCODERFACTORY_H_
+#define MEDIA_ENGINE_WEBRTCVIDEOENCODERFACTORY_H_
+
+#include <string>
+#include <vector>
+
+#include "common_types.h" // NOLINT(build/include)
+#include "media/base/codec.h"
+
+namespace webrtc {
+class VideoEncoder;
+}
+
+namespace cricket {
+
+// Deprecated. Use webrtc::VideoEncoderFactory instead.
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=7925
+class WebRtcVideoEncoderFactory {
+ public:
+ virtual ~WebRtcVideoEncoderFactory() {}
+
+  // Caller takes ownership of the returned object, which should be released
+  // by calling DestroyVideoEncoder().
+ virtual webrtc::VideoEncoder* CreateVideoEncoder(
+ const cricket::VideoCodec& codec) = 0;
+
+ // Returns a list of supported codecs in order of preference.
+ virtual const std::vector<cricket::VideoCodec>& supported_codecs() const = 0;
+
+ // Returns true if encoders created by this factory of the given codec type
+ // will use internal camera sources, meaning that they don't require/expect
+ // frames to be delivered via webrtc::VideoEncoder::Encode.
+ virtual bool EncoderTypeHasInternalSource(webrtc::VideoCodecType type) const;
+
+ virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) = 0;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVIDEOENCODERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory_unittest.cc
new file mode 100644
index 0000000000..2b156a89a2
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory_unittest.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcvideoencoderfactory.h"
+
+#include "test/gtest.h"
+
+class WebRtcVideoEncoderFactoryForTest
+ : public cricket::WebRtcVideoEncoderFactory {
+ public:
+ WebRtcVideoEncoderFactoryForTest() {
+ codecs_.push_back(cricket::VideoCodec("H264"));
+ codecs_.push_back(cricket::VideoCodec("VP8"));
+ }
+
+ webrtc::VideoEncoder* CreateVideoEncoder(
+ const cricket::VideoCodec& codec) override {
+ return nullptr;
+ }
+
+ const std::vector<cricket::VideoCodec>& supported_codecs() const override {
+ return codecs_;
+ }
+
+ void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override {}
+
+ std::vector<cricket::VideoCodec> codecs_;
+};
+
+TEST(WebRtcVideoEncoderFactoryTest, TestMultipleCallsToSupportedCodecs) {
+ WebRtcVideoEncoderFactoryForTest factory;
+ EXPECT_EQ(2u, factory.supported_codecs().size());
+ EXPECT_EQ("H264", factory.supported_codecs()[0].name);
+ EXPECT_EQ("VP8", factory.supported_codecs()[1].name);
+
+ // The codec list doesn't grow when called repeatedly.
+ EXPECT_EQ(2u, factory.supported_codecs().size());
+}
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.cc
new file mode 100644
index 0000000000..73093db5ad
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.cc
@@ -0,0 +1,2616 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/engine/webrtcvideoengine.h"
+
+#include <stdio.h>
+#include <algorithm>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "api/video/i420_buffer.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "call/call.h"
+#include "common_video/h264/profile_level_id.h"
+#include "media/engine/constants.h"
+#include "media/engine/convert_legacy_video_factory.h"
+#include "media/engine/simulcast.h"
+#include "media/engine/webrtcmediaengine.h"
+#include "media/engine/webrtcvoiceengine.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/timeutils.h"
+#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/field_trial.h"
+
+using DegradationPreference = webrtc::VideoSendStream::DegradationPreference;
+
+namespace cricket {
+
+// Hack in order to pass in |receive_stream_id| to legacy clients.
+// TODO(magjed): Remove once WebRtcVideoDecoderFactory is deprecated and
+// webrtc:7925 is fixed.
+class DecoderFactoryAdapter {
+ public:
+ explicit DecoderFactoryAdapter(
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_video_decoder_factory)
+ : cricket_decoder_with_params_(new CricketDecoderWithParams(
+ std::move(external_video_decoder_factory))),
+ decoder_factory_(ConvertVideoDecoderFactory(
+ std::unique_ptr<WebRtcVideoDecoderFactory>(
+ cricket_decoder_with_params_))) {}
+
+ explicit DecoderFactoryAdapter(
+ std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory)
+ : cricket_decoder_with_params_(nullptr),
+ decoder_factory_(std::move(video_decoder_factory)) {}
+
+ void SetReceiveStreamId(const std::string& receive_stream_id) {
+ if (cricket_decoder_with_params_)
+ cricket_decoder_with_params_->SetReceiveStreamId(receive_stream_id);
+ }
+
+ std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const {
+ return decoder_factory_->GetSupportedFormats();
+ }
+
+ std::unique_ptr<webrtc::VideoDecoder> CreateVideoDecoder(
+ const webrtc::SdpVideoFormat& format) {
+ return decoder_factory_->CreateVideoDecoder(format);
+ }
+
+ private:
+ // WebRtcVideoDecoderFactory implementation that allows to override
+ // |receive_stream_id|.
+ class CricketDecoderWithParams : public WebRtcVideoDecoderFactory {
+ public:
+ explicit CricketDecoderWithParams(
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory)
+ : external_decoder_factory_(std::move(external_decoder_factory)) {}
+
+ void SetReceiveStreamId(const std::string& receive_stream_id) {
+ receive_stream_id_ = receive_stream_id;
+ }
+
+ private:
+ webrtc::VideoDecoder* CreateVideoDecoderWithParams(
+ const VideoCodec& codec,
+ VideoDecoderParams params) override {
+ if (!external_decoder_factory_)
+ return nullptr;
+ params.receive_stream_id = receive_stream_id_;
+ return external_decoder_factory_->CreateVideoDecoderWithParams(codec,
+ params);
+ }
+
+ webrtc::VideoDecoder* CreateVideoDecoderWithParams(
+ webrtc::VideoCodecType type,
+ VideoDecoderParams params) override {
+ RTC_NOTREACHED();
+ return nullptr;
+ }
+
+ void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override {
+ if (external_decoder_factory_) {
+ external_decoder_factory_->DestroyVideoDecoder(decoder);
+ } else {
+ delete decoder;
+ }
+ }
+
+ const std::unique_ptr<WebRtcVideoDecoderFactory> external_decoder_factory_;
+ std::string receive_stream_id_;
+ };
+
+ // If |cricket_decoder_with_params_| is non-null, it's owned by
+ // |decoder_factory_|.
+ CricketDecoderWithParams* const cricket_decoder_with_params_;
+ std::unique_ptr<webrtc::VideoDecoderFactory> decoder_factory_;
+};
+
+namespace {
+
+// Video decoder class to be used for unknown codecs. Doesn't support decoding
+// but logs messages to LS_ERROR.
+class NullVideoDecoder : public webrtc::VideoDecoder {
+ public:
+ int32_t InitDecode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores) override {
+ RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder.";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Decode(const webrtc::EncodedImage& input_image,
+ bool missing_frames,
+ const webrtc::RTPFragmentationHeader* fragmentation,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ int64_t render_time_ms) override {
+ RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding.";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* callback) override {
+ RTC_LOG(LS_ERROR)
+ << "Can't register decode complete callback on NullVideoDecoder.";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; }
+
+ const char* ImplementationName() const override { return "NullVideoDecoder"; }
+};
+
+// If this field trial is enabled, we will enable sending FlexFEC and disable
+// sending ULPFEC whenever the former has been negotiated in the SDPs.
+bool IsFlexfecFieldTrialEnabled() {
+ return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03");
+}
+
+// If this field trial is enabled, the "flexfec-03" codec will be advertised
+// as being supported. This means that "flexfec-03" will appear in the default
+// SDP offer, and we therefore need to be ready to receive FlexFEC packets from
+// the remote. It also means that FlexFEC SSRCs will be generated by
+// MediaSession and added as "a=ssrc:" and "a=ssrc-group:" lines in the local
+// SDP.
+bool IsFlexfecAdvertisedFieldTrialEnabled() {
+ return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03-Advertised");
+}
+
+void AddDefaultFeedbackParams(VideoCodec* codec) {
+ // Don't add any feedback params for RED and ULPFEC.
+ if (codec->name == kRedCodecName || codec->name == kUlpfecCodecName)
+ return;
+ codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+ codec->AddFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
+ // Don't add any more feedback params for FLEXFEC.
+ if (codec->name == kFlexfecCodecName)
+ return;
+ codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamCcm, kRtcpFbCcmParamFir));
+ codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
+ codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli));
+}
+
+
+// This function will assign dynamic payload types (in the range [96, 127]) to
+// the input codecs, and also add ULPFEC, RED, FlexFEC, and associated RTX
+// codecs for recognized codecs (VP8, VP9, H264, and RED). It will also add
+// default feedback params to the codecs.
+std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
+ std::vector<webrtc::SdpVideoFormat> input_formats) {
+ if (input_formats.empty())
+ return std::vector<VideoCodec>();
+ static const int kFirstDynamicPayloadType = 96;
+ static const int kLastDynamicPayloadType = 127;
+ int payload_type = kFirstDynamicPayloadType;
+
+ input_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName));
+ input_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName));
+
+ if (IsFlexfecAdvertisedFieldTrialEnabled()) {
+ webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName);
+ // This value is currently arbitrarily set to 10 seconds. (The unit
+ // is microseconds.) This parameter MUST be present in the SDP, but
+ // we never use the actual value anywhere in our code however.
+ // TODO(brandtr): Consider honouring this value in the sender and receiver.
+ flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}};
+ input_formats.push_back(flexfec_format);
+ }
+
+ std::vector<VideoCodec> output_codecs;
+ for (const webrtc::SdpVideoFormat& format : input_formats) {
+ VideoCodec codec(format);
+ codec.id = payload_type;
+ AddDefaultFeedbackParams(&codec);
+ output_codecs.push_back(codec);
+
+ // Increment payload type.
+ ++payload_type;
+ if (payload_type > kLastDynamicPayloadType)
+ break;
+
+ // Add associated RTX codec for recognized codecs.
+ // TODO(deadbeef): Should we add RTX codecs for external codecs whose names
+ // we don't recognize?
+ if (CodecNamesEq(codec.name, kVp8CodecName) ||
+ CodecNamesEq(codec.name, kVp9CodecName) ||
+ CodecNamesEq(codec.name, kH264CodecName) ||
+ CodecNamesEq(codec.name, kRedCodecName)) {
+ output_codecs.push_back(
+ VideoCodec::CreateRtxCodec(payload_type, codec.id));
+
+ // Increment payload type.
+ ++payload_type;
+ if (payload_type > kLastDynamicPayloadType)
+ break;
+ }
+ }
+ return output_codecs;
+}
+
+std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
+ const webrtc::VideoEncoderFactory* encoder_factory) {
+ return encoder_factory ? AssignPayloadTypesAndDefaultCodecs(
+ encoder_factory->GetSupportedFormats())
+ : std::vector<VideoCodec>();
+}
+
+static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
+ std::stringstream out;
+ out << '{';
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ out << codecs[i].ToString();
+ if (i != codecs.size() - 1) {
+ out << ", ";
+ }
+ }
+ out << '}';
+ return out.str();
+}
+
+static bool ValidateCodecFormats(const std::vector<VideoCodec>& codecs) {
+ bool has_video = false;
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ if (!codecs[i].ValidateCodecFormat()) {
+ return false;
+ }
+ if (codecs[i].GetCodecType() == VideoCodec::CODEC_VIDEO) {
+ has_video = true;
+ }
+ }
+ if (!has_video) {
+ RTC_LOG(LS_ERROR) << "Setting codecs without a video codec is invalid: "
+ << CodecVectorToString(codecs);
+ return false;
+ }
+ return true;
+}
+
+static bool ValidateStreamParams(const StreamParams& sp) {
+ if (sp.ssrcs.empty()) {
+ RTC_LOG(LS_ERROR) << "No SSRCs in stream parameters: " << sp.ToString();
+ return false;
+ }
+
+ std::vector<uint32_t> primary_ssrcs;
+ sp.GetPrimarySsrcs(&primary_ssrcs);
+ std::vector<uint32_t> rtx_ssrcs;
+ sp.GetFidSsrcs(primary_ssrcs, &rtx_ssrcs);
+ for (uint32_t rtx_ssrc : rtx_ssrcs) {
+ bool rtx_ssrc_present = false;
+ for (uint32_t sp_ssrc : sp.ssrcs) {
+ if (sp_ssrc == rtx_ssrc) {
+ rtx_ssrc_present = true;
+ break;
+ }
+ }
+ if (!rtx_ssrc_present) {
+ RTC_LOG(LS_ERROR) << "RTX SSRC '" << rtx_ssrc
+ << "' missing from StreamParams ssrcs: "
+ << sp.ToString();
+ return false;
+ }
+ }
+ if (!rtx_ssrcs.empty() && primary_ssrcs.size() != rtx_ssrcs.size()) {
+ RTC_LOG(LS_ERROR)
+ << "RTX SSRCs exist, but don't cover all SSRCs (unsupported): "
+ << sp.ToString();
+ return false;
+ }
+
+ return true;
+}
+
+// Returns true if the given codec is disallowed from doing simulcast.
+bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
+ return CodecNamesEq(codec_name, kH264CodecName) ||
+ CodecNamesEq(codec_name, kVp9CodecName);
+}
+
+// The selected thresholds for QVGA and VGA corresponded to a QP around 10.
+// The change in QP declined above the selected bitrates.
+static int GetMaxDefaultVideoBitrateKbps(int width, int height) {
+ if (width * height <= 320 * 240) {
+ return 600;
+ } else if (width * height <= 640 * 480) {
+ return 1700;
+ } else if (width * height <= 960 * 540) {
+ return 2000;
+ } else {
+ return 2500;
+ }
+}
+
+// Parses the "WebRTC-SupportVP9SVC" field trial string of the form
+// "EnabledByFlag_<N>SL<M>TL" into spatial/temporal layer counts.
+// Returns true and fills the out-params only when the trial is set, parses
+// cleanly, and both counts are within the supported ranges (1-2 spatial,
+// 1-3 temporal); otherwise returns false and the out-params are unreliable.
+bool GetVp9LayersFromFieldTrialGroup(int* num_spatial_layers,
+ int* num_temporal_layers) {
+ std::string group = webrtc::field_trial::FindFullName("WebRTC-SupportVP9SVC");
+ if (group.empty())
+ return false;
+
+ if (sscanf(group.c_str(), "EnabledByFlag_%dSL%dTL", num_spatial_layers,
+ num_temporal_layers) != 2) {
+ return false;
+ }
+ const int kMaxSpatialLayers = 2;
+ if (*num_spatial_layers > kMaxSpatialLayers || *num_spatial_layers < 1)
+ return false;
+
+ const int kMaxTemporalLayers = 3;
+ if (*num_temporal_layers > kMaxTemporalLayers || *num_temporal_layers < 1)
+ return false;
+
+ return true;
+}
+
+// Returns the VP9 spatial layer count from the SVC field trial, or 1 when
+// the trial is unset/invalid.
+int GetDefaultVp9SpatialLayers() {
+ int num_sl;
+ int num_tl;
+ if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
+ return num_sl;
+ }
+ return 1;
+}
+
+// Returns the VP9 temporal layer count from the SVC field trial, or 1 when
+// the trial is unset/invalid.
+int GetDefaultVp9TemporalLayers() {
+ int num_sl;
+ int num_tl;
+ if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
+ return num_tl;
+ }
+ return 1;
+}
+
+// Field trial controlling the forced VP8 software-fallback encoder
+// experiment; parsed by GetFallbackMinBpsFromFieldTrial() below.
+const char kForcedFallbackFieldTrial[] =
+ "WebRTC-VP8-Forced-Fallback-Encoder-v2";
+
+// Reads the forced-fallback field trial ("Enabled-<min_px>,<max_px>,<min_bps>")
+// and returns its minimum bitrate in bps, or nullopt when the trial is
+// disabled, unset, malformed, or specifies a non-positive bitrate. Note the
+// pixel bounds are parsed only to validate the format; they are not returned.
+rtc::Optional<int> GetFallbackMinBpsFromFieldTrial() {
+ if (!webrtc::field_trial::IsEnabled(kForcedFallbackFieldTrial))
+ return rtc::nullopt;
+
+ std::string group =
+ webrtc::field_trial::FindFullName(kForcedFallbackFieldTrial);
+ if (group.empty())
+ return rtc::nullopt;
+
+ int min_pixels;
+ int max_pixels;
+ int min_bps;
+ if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &min_pixels, &max_pixels,
+ &min_bps) != 3) {
+ return rtc::nullopt;
+ }
+
+ if (min_bps <= 0)
+ return rtc::nullopt;
+
+ return min_bps;
+}
+
+// Minimum video bitrate in bps: the field-trial override when present,
+// otherwise the compiled-in default kMinVideoBitrateBps.
+int GetMinVideoBitrateBps() {
+ return GetFallbackMinBpsFromFieldTrial().value_or(kMinVideoBitrateBps);
+}
+} // namespace
+
+// This constant is really an on/off, lower-level configurable NACK history
+// duration hasn't been implemented.
+static const int kNackHistoryMs = 1000;
+
+// Placeholder local SSRC for RTCP receiver reports until a real send stream
+// provides one (see AddSendStream / RemoveSendStream).
+static const int kDefaultRtcpReceiverReportSsrc = 1;
+
+// Minimum time interval for logging stats.
+static const int64_t kStatsLogIntervalMs = 10000;
+
+// Builds codec-specific encoder settings (H264/VP8/VP9) for |codec| from the
+// stream's current options: screencast disables denoising, frame dropping
+// and automatic resize; automatic resize is also disabled for simulcast
+// (more than one SSRC). Returns nullptr for codecs with no specific settings.
+rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
+WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
+ const VideoCodec& codec) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ bool is_screencast = parameters_.options.is_screencast.value_or(false);
+ // No automatic resizing when using simulcast or screencast.
+ bool automatic_resize =
+ !is_screencast && parameters_.config.rtp.ssrcs.size() == 1;
+ bool frame_dropping = !is_screencast;
+ bool denoising;
+ bool codec_default_denoising = false;
+ if (is_screencast) {
+ denoising = false;
+ } else {
+ // Use codec default if video_noise_reduction is unset.
+ codec_default_denoising = !parameters_.options.video_noise_reduction;
+ denoising = parameters_.options.video_noise_reduction.value_or(false);
+ }
+
+ if (CodecNamesEq(codec.name, kH264CodecName)) {
+ webrtc::VideoCodecH264 h264_settings =
+ webrtc::VideoEncoder::GetDefaultH264Settings();
+ h264_settings.frameDroppingOn = frame_dropping;
+ return new rtc::RefCountedObject<
+ webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
+ }
+ if (CodecNamesEq(codec.name, kVp8CodecName)) {
+ webrtc::VideoCodecVP8 vp8_settings =
+ webrtc::VideoEncoder::GetDefaultVp8Settings();
+ vp8_settings.automaticResizeOn = automatic_resize;
+ // VP8 denoising is enabled by default.
+ vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
+ vp8_settings.frameDroppingOn = frame_dropping;
+ return new rtc::RefCountedObject<
+ webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
+ }
+ if (CodecNamesEq(codec.name, kVp9CodecName)) {
+ webrtc::VideoCodecVP9 vp9_settings =
+ webrtc::VideoEncoder::GetDefaultVp9Settings();
+ if (is_screencast) {
+ // TODO(asapersson): Set to 2 for now since there is a DCHECK in
+ // VideoSendStream::ReconfigureVideoEncoder.
+ vp9_settings.numberOfSpatialLayers = 2;
+ } else {
+ vp9_settings.numberOfSpatialLayers = GetDefaultVp9SpatialLayers();
+ }
+ // VP9 denoising is disabled by default.
+ // NOTE(review): the comments say VP8 defaults on and VP9 defaults off,
+ // yet both branches map "codec default" to true here — confirm intent.
+ vp9_settings.denoisingOn = codec_default_denoising ? true : denoising;
+ vp9_settings.frameDroppingOn = frame_dropping;
+ vp9_settings.automaticResizeOn = automatic_resize;
+ return new rtc::RefCountedObject<
+ webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+ }
+ return nullptr;
+}
+
+// Starts with no default sink; frames for unsignalled SSRCs are dropped by
+// the renderer until SetDefaultSink() is called.
+DefaultUnsignalledSsrcHandler::DefaultUnsignalledSsrcHandler()
+ : default_sink_(nullptr) {}
+
+// Handles an RTP packet on an SSRC no receive stream was signalled for:
+// tears down any previous default receive stream and creates a new default
+// stream for |ssrc|, attaching the configured default sink. Always tells the
+// caller to deliver the packet.
+UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc(
+ WebRtcVideoChannel* channel,
+ uint32_t ssrc) {
+ rtc::Optional<uint32_t> default_recv_ssrc =
+ channel->GetDefaultReceiveStreamSsrc();
+
+ if (default_recv_ssrc) {
+ // Fix: log the SSRC of the stream actually being destroyed, not the
+ // newly arrived one.
+ RTC_LOG(LS_INFO) << "Destroying old default receive stream for SSRC="
+ << *default_recv_ssrc << ".";
+ channel->RemoveRecvStream(*default_recv_ssrc);
+ }
+
+ StreamParams sp;
+ sp.ssrcs.push_back(ssrc);
+ RTC_LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc
+ << ".";
+ // Best effort: a failure is logged but the packet is still delivered.
+ if (!channel->AddRecvStream(sp, true)) {
+ RTC_LOG(LS_WARNING) << "Could not create default receive stream.";
+ }
+
+ channel->SetSink(ssrc, default_sink_);
+ return kDeliverPacket;
+}
+
+// Returns the sink used for default (unsignalled) receive streams; may be
+// nullptr if none has been set.
+rtc::VideoSinkInterface<webrtc::VideoFrame>*
+DefaultUnsignalledSsrcHandler::GetDefaultSink() const {
+ return default_sink_;
+}
+
+// Stores |sink| as the sink for future default receive streams and, if a
+// default receive stream already exists on |channel|, re-points it at the
+// new sink immediately.
+void DefaultUnsignalledSsrcHandler::SetDefaultSink(
+ WebRtcVideoChannel* channel,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ default_sink_ = sink;
+ rtc::Optional<uint32_t> default_recv_ssrc =
+ channel->GetDefaultReceiveStreamSsrc();
+ if (default_recv_ssrc) {
+ channel->SetSink(*default_recv_ssrc, default_sink_);
+ }
+}
+
+// Constructs the engine from the legacy (cricket) external factory types;
+// both are adapted/converted to the webrtc factory interfaces used
+// internally. Either factory may be null (wrapped as-is by the adapters).
+WebRtcVideoEngine::WebRtcVideoEngine(
+ std::unique_ptr<WebRtcVideoEncoderFactory> external_video_encoder_factory,
+ std::unique_ptr<WebRtcVideoDecoderFactory> external_video_decoder_factory)
+ : decoder_factory_(
+ new DecoderFactoryAdapter(std::move(external_video_decoder_factory))),
+ encoder_factory_(ConvertVideoEncoderFactory(
+ std::move(external_video_encoder_factory))) {
+ RTC_LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()";
+}
+
+// Constructs the engine directly from webrtc:: factory interfaces; the
+// decoder factory is still wrapped in the internal adapter.
+WebRtcVideoEngine::WebRtcVideoEngine(
+ std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
+ std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory)
+ : decoder_factory_(
+ new DecoderFactoryAdapter(std::move(video_decoder_factory))),
+ encoder_factory_(std::move(video_encoder_factory)) {
+ RTC_LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()";
+}
+
+// Logs destruction; factories are released by the unique_ptr members.
+WebRtcVideoEngine::~WebRtcVideoEngine() {
+ RTC_LOG(LS_INFO) << "WebRtcVideoEngine::~WebRtcVideoEngine";
+}
+
+// Creates a new media channel bound to |call|. Ownership of the returned
+// channel passes to the caller; the engine retains ownership of the
+// factories the channel borrows.
+WebRtcVideoChannel* WebRtcVideoEngine::CreateChannel(
+ webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) {
+ RTC_LOG(LS_INFO) << "CreateChannel. Options: " << options.ToString();
+ return new WebRtcVideoChannel(call, config, options, encoder_factory_.get(),
+ decoder_factory_.get());
+}
+
+// Returns the supported codec list with payload types assigned and the
+// default auxiliary codecs (RTX/FEC etc., per the helper) included.
+std::vector<VideoCodec> WebRtcVideoEngine::codecs() const {
+ return AssignPayloadTypesAndDefaultCodecs(encoder_factory_.get());
+}
+
+// Advertises the RTP header extensions supported for video, each with its
+// default extension ID.
+RtpCapabilities WebRtcVideoEngine::GetCapabilities() const {
+ RtpCapabilities capabilities;
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kTimestampOffsetUri,
+ webrtc::RtpExtension::kTimestampOffsetDefaultId));
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri,
+ webrtc::RtpExtension::kAbsSendTimeDefaultId));
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri,
+ webrtc::RtpExtension::kVideoRotationDefaultId));
+ capabilities.header_extensions.push_back(webrtc::RtpExtension(
+ webrtc::RtpExtension::kTransportSequenceNumberUri,
+ webrtc::RtpExtension::kTransportSequenceNumberDefaultId));
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kPlayoutDelayUri,
+ webrtc::RtpExtension::kPlayoutDelayDefaultId));
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kVideoContentTypeUri,
+ webrtc::RtpExtension::kVideoContentTypeDefaultId));
+ capabilities.header_extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kVideoTimingUri,
+ webrtc::RtpExtension::kVideoTimingDefaultId));
+ return capabilities;
+}
+
+// Constructs the channel with borrowed (non-owned) call and factory
+// pointers, seeds the receive codec list from the locally supported codecs,
+// and takes the FlexFEC payload type from the first mapped codec entry.
+WebRtcVideoChannel::WebRtcVideoChannel(
+ webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options,
+ webrtc::VideoEncoderFactory* encoder_factory,
+ DecoderFactoryAdapter* decoder_factory)
+ : VideoMediaChannel(config),
+ call_(call),
+ unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_),
+ video_config_(config.video),
+ encoder_factory_(encoder_factory),
+ decoder_factory_(decoder_factory),
+ default_send_options_(options),
+ last_stats_log_ms_(-1) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
+ sending_ = false;
+ recv_codecs_ =
+ MapCodecs(AssignPayloadTypesAndDefaultCodecs(encoder_factory_));
+ // NOTE(review): front() assumes MapCodecs never returns an empty vector
+ // here — presumably guaranteed by AssignPayloadTypesAndDefaultCodecs;
+ // confirm.
+ recv_flexfec_payload_type_ = recv_codecs_.front().flexfec_payload_type;
+}
+
+// Deletes all owned send and receive stream wrappers (stored as raw
+// pointers in the maps).
+WebRtcVideoChannel::~WebRtcVideoChannel() {
+ for (auto& kv : send_streams_)
+ delete kv.second;
+ for (auto& kv : receive_streams_)
+ delete kv.second;
+}
+
+// Picks the send codec: the first entry of |remote_mapped_codecs| (i.e. the
+// remote side's preference order) that is also supported locally, or
+// nullopt when none matches.
+rtc::Optional<WebRtcVideoChannel::VideoCodecSettings>
+WebRtcVideoChannel::SelectSendVideoCodec(
+ const std::vector<VideoCodecSettings>& remote_mapped_codecs) const {
+ const std::vector<VideoCodec> local_supported_codecs =
+ AssignPayloadTypesAndDefaultCodecs(encoder_factory_);
+ // Select the first remote codec that is supported locally.
+ for (const VideoCodecSettings& remote_mapped_codec : remote_mapped_codecs) {
+ // For H264, we will limit the encode level to the remote offered level
+ // regardless if level asymmetry is allowed or not. This is strictly not
+ // following the spec in https://tools.ietf.org/html/rfc6184#section-8.2.2
+ // since we should limit the encode level to the lower of local and remote
+ // level when level asymmetry is not allowed.
+ if (FindMatchingCodec(local_supported_codecs, remote_mapped_codec.codec))
+ return remote_mapped_codec;
+ }
+ // No remote codec was supported.
+ return rtc::nullopt;
+}
+
+// Order-insensitive comparison of two receive codec lists, ignoring FlexFEC
+// differences. Takes copies by value on purpose so they can be sorted
+// in-place. Returns true if the sets differ (i.e. receive streams need
+// reconfiguration).
+bool WebRtcVideoChannel::NonFlexfecReceiveCodecsHaveChanged(
+ std::vector<VideoCodecSettings> before,
+ std::vector<VideoCodecSettings> after) {
+ if (before.size() != after.size()) {
+ return true;
+ }
+
+ // The receive codec order doesn't matter, so we sort the codecs before
+ // comparing. This is necessary because currently the
+ // only way to change the send codec is to munge SDP, which causes
+ // the receive codec list to change order, which causes the streams
+ // to be recreates which causes a "blink" of black video. In order
+ // to support munging the SDP in this way without recreating receive
+ // streams, we ignore the order of the received codecs so that
+ // changing the order doesn't cause this "blink".
+ auto comparison =
+ [](const VideoCodecSettings& codec1, const VideoCodecSettings& codec2) {
+ return codec1.codec.id > codec2.codec.id;
+ };
+ std::sort(before.begin(), before.end(), comparison);
+ std::sort(after.begin(), after.end(), comparison);
+
+ // Changes in FlexFEC payload type are handled separately in
+ // WebRtcVideoChannel::GetChangedRecvParameters, so disregard FlexFEC in the
+ // comparison here.
+ return !std::equal(before.begin(), before.end(), after.begin(),
+ VideoCodecSettings::EqualsDisregardingFlexfec);
+}
+
+// Diffs |params| against the channel's current send state and records only
+// the changed pieces (codec, header extensions, max bitrate, conference
+// mode, RTCP mode) into |changed_params|. Returns false on invalid codecs
+// or extensions, or when no remote codec is supported locally.
+bool WebRtcVideoChannel::GetChangedSendParameters(
+ const VideoSendParameters& params,
+ ChangedSendParameters* changed_params) const {
+ if (!ValidateCodecFormats(params.codecs) ||
+ !ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+
+ // Select one of the remote codecs that will be used as send codec.
+ rtc::Optional<VideoCodecSettings> selected_send_codec =
+ SelectSendVideoCodec(MapCodecs(params.codecs));
+
+ if (!selected_send_codec) {
+ RTC_LOG(LS_ERROR) << "No video codecs supported.";
+ return false;
+ }
+
+ // Never enable sending FlexFEC, unless we are in the experiment.
+ if (!IsFlexfecFieldTrialEnabled()) {
+ if (selected_send_codec->flexfec_payload_type != -1) {
+ RTC_LOG(LS_INFO)
+ << "Remote supports flexfec-03, but we will not send since "
+ << "WebRTC-FlexFEC-03 field trial is not enabled.";
+ }
+ selected_send_codec->flexfec_payload_type = -1;
+ }
+
+ if (!send_codec_ || *selected_send_codec != *send_codec_)
+ changed_params->codec = selected_send_codec;
+
+ // Handle RTP header extensions.
+ std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
+ params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true);
+ if (!send_rtp_extensions_ || (*send_rtp_extensions_ != filtered_extensions)) {
+ changed_params->rtp_header_extensions =
+ rtc::Optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
+ }
+
+ // Handle max bitrate.
+ if (params.max_bandwidth_bps != send_params_.max_bandwidth_bps &&
+ params.max_bandwidth_bps >= -1) {
+ // 0 or -1 uncaps max bitrate.
+ // TODO(pbos): Reconsider how 0 should be treated. It is not mentioned as a
+ // special value and might very well be used for stopping sending.
+ changed_params->max_bandwidth_bps =
+ params.max_bandwidth_bps == 0 ? -1 : params.max_bandwidth_bps;
+ }
+
+ // Handle conference mode.
+ if (params.conference_mode != send_params_.conference_mode) {
+ changed_params->conference_mode = params.conference_mode;
+ }
+
+ // Handle RTCP mode.
+ if (params.rtcp.reduced_size != send_params_.rtcp.reduced_size) {
+ changed_params->rtcp_mode = params.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
+ }
+
+ return true;
+}
+
+// Video traffic is marked AF41 (assured forwarding) for DiffServ QoS.
+rtc::DiffServCodePoint WebRtcVideoChannel::PreferredDscp() const {
+ return rtc::DSCP_AF41;
+}
+
+// Applies new send parameters: computes the delta via
+// GetChangedSendParameters, updates the cached send codec / extensions /
+// call-level bitrate config, pushes the delta to every send stream and, when
+// the codec or RTCP mode changed, refreshes feedback options on every
+// receive stream. Returns false (leaving state untouched) if the parameters
+// are invalid.
+bool WebRtcVideoChannel::SetSendParameters(const VideoSendParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetSendParameters");
+ RTC_LOG(LS_INFO) << "SetSendParameters: " << params.ToString();
+ ChangedSendParameters changed_params;
+ if (!GetChangedSendParameters(params, &changed_params)) {
+ return false;
+ }
+
+ if (changed_params.codec) {
+ const VideoCodecSettings& codec_settings = *changed_params.codec;
+ send_codec_ = codec_settings;
+ RTC_LOG(LS_INFO) << "Using codec: " << codec_settings.codec.ToString();
+ }
+
+ if (changed_params.rtp_header_extensions) {
+ send_rtp_extensions_ = changed_params.rtp_header_extensions;
+ }
+
+ if (changed_params.codec || changed_params.max_bandwidth_bps) {
+ if (params.max_bandwidth_bps == -1) {
+ // Unset the global max bitrate (max_bitrate_bps) if max_bandwidth_bps is
+ // -1, which corresponds to no "b=AS" attribute in SDP. Note that the
+ // global max bitrate may be set below in GetBitrateConfigForCodec, from
+ // the codec max bitrate.
+ // TODO(pbos): This should be reconsidered (codec max bitrate should
+ // probably not affect global call max bitrate).
+ bitrate_config_.max_bitrate_bps = -1;
+ }
+ if (send_codec_) {
+ // TODO(holmer): Changing the codec parameters shouldn't necessarily mean
+ // that we change the min/max of bandwidth estimation. Reevaluate this.
+ bitrate_config_ = GetBitrateConfigForCodec(send_codec_->codec);
+ if (!changed_params.codec) {
+ // If the codec isn't changing, set the start bitrate to -1 which means
+ // "unchanged" so that BWE isn't affected.
+ bitrate_config_.start_bitrate_bps = -1;
+ }
+ }
+ if (params.max_bandwidth_bps >= 0) {
+ // Note that max_bandwidth_bps intentionally takes priority over the
+ // bitrate config for the codec. This allows FEC to be applied above the
+ // codec target bitrate.
+ // TODO(pbos): Figure out whether b=AS means max bitrate for this
+ // WebRtcVideoChannel (in which case we're good), or per sender (SSRC),
+ // in which case this should not set a Call::BitrateConfig but rather
+ // reconfigure all senders.
+ bitrate_config_.max_bitrate_bps =
+ params.max_bandwidth_bps == 0 ? -1 : params.max_bandwidth_bps;
+ }
+ call_->SetBitrateConfig(bitrate_config_);
+ }
+
+ {
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (auto& kv : send_streams_) {
+ kv.second->SetSendParameters(changed_params);
+ }
+ if (changed_params.codec || changed_params.rtcp_mode) {
+ // Update receive feedback parameters from new codec or RTCP mode.
+ RTC_LOG(LS_INFO)
+ << "SetFeedbackOptions on all the receive streams because the send "
+ "codec or RTCP mode has changed.";
+ for (auto& kv : receive_streams_) {
+ RTC_DCHECK(kv.second != nullptr);
+ kv.second->SetFeedbackParameters(
+ HasNack(send_codec_->codec), HasRemb(send_codec_->codec),
+ HasTransportCc(send_codec_->codec),
+ params.rtcp.reduced_size ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound);
+ }
+ }
+ }
+ send_params_ = params;
+ return true;
+}
+
+// Returns the RTP parameters of the send stream for |ssrc|, augmented with
+// the channel-wide negotiated send codec list. Returns a default-constructed
+// RtpParameters (and logs a warning) when the stream doesn't exist.
+webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters(
+ uint32_t ssrc) const {
+ rtc::CritScope stream_lock(&stream_crit_);
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+
+ webrtc::RtpParameters rtp_params = it->second->GetRtpParameters();
+ // Need to add the common list of codecs to the send stream-specific
+ // RTP parameters.
+ for (const VideoCodec& codec : send_params_.codecs) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+// Applies per-sender RTP parameters to the stream for |ssrc|. Changing the
+// codec list through this API is rejected; only the remaining parameters
+// are forwarded to the stream. Returns false if the stream doesn't exist or
+// the codec list differs.
+bool WebRtcVideoChannel::SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRtpSendParameters");
+ rtc::CritScope stream_lock(&stream_crit_);
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ RTC_LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ // TODO(deadbeef): Handle setting parameters with a list of codecs in a
+ // different order (which should change the send codec).
+ // NOTE: GetRtpSendParameters re-acquires stream_crit_ here; this relies on
+ // rtc::CritScope being recursive on this platform.
+ webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
+ if (current_parameters.codecs != parameters.codecs) {
+ RTC_LOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
+ << "is not currently supported.";
+ return false;
+ }
+
+ return it->second->SetRtpParameters(parameters);
+}
+
+// Returns RTP parameters for the receive stream with |ssrc| (or, for
+// ssrc == 0, the default unsignaled stream if a default sink is configured),
+// plus the channel-wide receive codec list. Returns an empty RtpParameters
+// and logs a warning when the stream isn't found.
+webrtc::RtpParameters WebRtcVideoChannel::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
+ webrtc::RtpParameters rtp_params;
+ rtc::CritScope stream_lock(&stream_crit_);
+ // SSRC of 0 represents an unsignaled receive stream.
+ if (ssrc == 0) {
+ if (!default_unsignalled_ssrc_handler_.GetDefaultSink()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to get RTP parameters for the default, "
+ "unsignaled video receive stream, but not yet "
+ "configured to receive such a stream.";
+ return rtp_params;
+ }
+ rtp_params.encodings.emplace_back();
+ } else {
+ auto it = receive_streams_.find(ssrc);
+ if (it == receive_streams_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to get RTP receive parameters for stream "
+ << "with SSRC " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+ // TODO(deadbeef): Return stream-specific parameters, beyond just SSRC.
+ rtp_params.encodings.emplace_back();
+ rtp_params.encodings[0].ssrc = it->second->GetFirstPrimarySsrc();
+ }
+
+ // Add codecs, which any stream is prepared to receive.
+ for (const VideoCodec& codec : recv_params_.codecs) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+// Receive-side RTP parameters are read-only today: this validates that the
+// target stream (or the default unsignaled stream for ssrc == 0) exists and
+// that |parameters| is identical to the current parameters; any actual
+// change is rejected.
+bool WebRtcVideoChannel::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRtpReceiveParameters");
+ rtc::CritScope stream_lock(&stream_crit_);
+
+ // SSRC of 0 represents an unsignaled receive stream.
+ if (ssrc == 0) {
+ if (!default_unsignalled_ssrc_handler_.GetDefaultSink()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to set RTP parameters for the default, "
+ "unsignaled video receive stream, but not yet "
+ "configured to receive such a stream.";
+ return false;
+ }
+ } else {
+ auto it = receive_streams_.find(ssrc);
+ if (it == receive_streams_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to set RTP receive parameters for stream "
+ << "with SSRC " << ssrc << " which doesn't exist.";
+ return false;
+ }
+ }
+
+ webrtc::RtpParameters current_parameters = GetRtpReceiveParameters(ssrc);
+ if (current_parameters != parameters) {
+ RTC_LOG(LS_ERROR) << "Changing the RTP receive parameters is currently "
+ << "unsupported.";
+ return false;
+ }
+ return true;
+}
+
+// Diffs |params| against the current receive state and records only the
+// changed pieces (codec set, header extensions, FlexFEC payload type) into
+// |changed_params|. Returns false when the codec/extension lists are
+// invalid, empty, or contain a codec we cannot decode locally.
+bool WebRtcVideoChannel::GetChangedRecvParameters(
+ const VideoRecvParameters& params,
+ ChangedRecvParameters* changed_params) const {
+ if (!ValidateCodecFormats(params.codecs) ||
+ !ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+
+ // Handle receive codecs.
+ const std::vector<VideoCodecSettings> mapped_codecs =
+ MapCodecs(params.codecs);
+ if (mapped_codecs.empty()) {
+ RTC_LOG(LS_ERROR) << "SetRecvParameters called without any video codecs.";
+ return false;
+ }
+
+ // Verify that every mapped codec is supported locally.
+ const std::vector<VideoCodec> local_supported_codecs =
+ AssignPayloadTypesAndDefaultCodecs(encoder_factory_);
+ for (const VideoCodecSettings& mapped_codec : mapped_codecs) {
+ if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) {
+ RTC_LOG(LS_ERROR)
+ << "SetRecvParameters called with unsupported video codec: "
+ << mapped_codec.codec.ToString();
+ return false;
+ }
+ }
+
+ if (NonFlexfecReceiveCodecsHaveChanged(recv_codecs_, mapped_codecs)) {
+ changed_params->codec_settings =
+ rtc::Optional<std::vector<VideoCodecSettings>>(mapped_codecs);
+ }
+
+ // Handle RTP header extensions.
+ std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
+ params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false);
+ if (filtered_extensions != recv_rtp_extensions_) {
+ changed_params->rtp_header_extensions =
+ rtc::Optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
+ }
+
+ // FlexFEC payload type is tracked off the first mapped codec entry.
+ int flexfec_payload_type = mapped_codecs.front().flexfec_payload_type;
+ if (flexfec_payload_type != recv_flexfec_payload_type_) {
+ changed_params->flexfec_payload_type = flexfec_payload_type;
+ }
+
+ return true;
+}
+
+// Applies new receive parameters: computes the delta, updates the cached
+// FlexFEC payload type / extensions / codec list, then pushes the delta to
+// every receive stream. Returns false (leaving state untouched) on invalid
+// parameters.
+bool WebRtcVideoChannel::SetRecvParameters(const VideoRecvParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRecvParameters");
+ RTC_LOG(LS_INFO) << "SetRecvParameters: " << params.ToString();
+ ChangedRecvParameters changed_params;
+ if (!GetChangedRecvParameters(params, &changed_params)) {
+ return false;
+ }
+ if (changed_params.flexfec_payload_type) {
+ RTC_LOG(LS_INFO) << "Changing FlexFEC payload type (recv) from "
+ << recv_flexfec_payload_type_ << " to "
+ << *changed_params.flexfec_payload_type;
+ recv_flexfec_payload_type_ = *changed_params.flexfec_payload_type;
+ }
+ if (changed_params.rtp_header_extensions) {
+ recv_rtp_extensions_ = *changed_params.rtp_header_extensions;
+ }
+ if (changed_params.codec_settings) {
+ RTC_LOG(LS_INFO) << "Changing recv codecs from "
+ << CodecSettingsVectorToString(recv_codecs_) << " to "
+ << CodecSettingsVectorToString(
+ *changed_params.codec_settings);
+ recv_codecs_ = *changed_params.codec_settings;
+ }
+
+ {
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (auto& kv : receive_streams_) {
+ kv.second->SetRecvParameters(changed_params);
+ }
+ }
+ recv_params_ = params;
+ return true;
+}
+
+// Formats a codec-settings list as "{codecA, codecB, ...}" for logging.
+std::string WebRtcVideoChannel::CodecSettingsVectorToString(
+ const std::vector<VideoCodecSettings>& codecs) {
+ std::stringstream out;
+ out << '{';
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ out << codecs[i].codec.ToString();
+ if (i != codecs.size() - 1) {
+ out << ", ";
+ }
+ }
+ out << '}';
+ return out.str();
+}
+
+// Copies the currently selected send codec into |codec|; returns false
+// (leaving |codec| untouched) when no send codec has been negotiated yet.
+bool WebRtcVideoChannel::GetSendCodec(VideoCodec* codec) {
+ if (!send_codec_) {
+ RTC_LOG(LS_VERBOSE) << "GetSendCodec: No send codec set.";
+ return false;
+ }
+ *codec = send_codec_->codec;
+ return true;
+}
+
+// Starts or stops sending on every send stream. Enabling requires a send
+// codec to be set first; the flag is also remembered so streams added later
+// start in the right state (see AddSendStream).
+bool WebRtcVideoChannel::SetSend(bool send) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetSend");
+ RTC_LOG(LS_VERBOSE) << "SetSend: " << (send ? "true" : "false");
+ if (send && !send_codec_) {
+ RTC_LOG(LS_ERROR) << "SetSend(true) called before setting codec.";
+ return false;
+ }
+ {
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (const auto& kv : send_streams_) {
+ kv.second->SetSend(send);
+ }
+ }
+ sending_ = send;
+ return true;
+}
+
+// TODO(nisse): The enable argument was used for mute logic which has
+// been moved to VideoBroadcaster. So remove the argument from this
+// method.
+// Attaches/updates the video source and per-stream options for the send
+// stream with |ssrc|. A nullptr |source| detaches; an unknown ssrc is only
+// tolerated (RTC_CHECK) when the source is null.
+bool WebRtcVideoChannel::SetVideoSend(
+ uint32_t ssrc,
+ bool enable,
+ const VideoOptions* options,
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
+ TRACE_EVENT0("webrtc", "SetVideoSend");
+ RTC_DCHECK(ssrc != 0);
+ RTC_LOG(LS_INFO) << "SetVideoSend (ssrc= " << ssrc << ", enable = " << enable
+ << ", options: "
+ << (options ? options->ToString() : "nullptr")
+ << ", source = " << (source ? "(source)" : "nullptr") << ")";
+
+ rtc::CritScope stream_lock(&stream_crit_);
+ const auto& kv = send_streams_.find(ssrc);
+ if (kv == send_streams_.end()) {
+ // Allow unknown ssrc only if source is null.
+ RTC_CHECK(source == nullptr);
+ RTC_LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
+ return false;
+ }
+
+ return kv->second->SetVideoSend(enable, options, source);
+}
+
+// Returns false (with an error log) if any SSRC in |sp| is already claimed
+// by an existing send stream. Caller must hold stream_crit_.
+bool WebRtcVideoChannel::ValidateSendSsrcAvailability(
+ const StreamParams& sp) const {
+ for (uint32_t ssrc : sp.ssrcs) {
+ if (send_ssrcs_.find(ssrc) != send_ssrcs_.end()) {
+ RTC_LOG(LS_ERROR) << "Send stream with SSRC '" << ssrc
+ << "' already exists.";
+ return false;
+ }
+ }
+ return true;
+}
+
+// Returns false (with an error log) if any SSRC in |sp| is already claimed
+// by an existing receive stream. Caller must hold stream_crit_.
+bool WebRtcVideoChannel::ValidateReceiveSsrcAvailability(
+ const StreamParams& sp) const {
+ for (uint32_t ssrc : sp.ssrcs) {
+ if (receive_ssrcs_.find(ssrc) != receive_ssrcs_.end()) {
+ RTC_LOG(LS_ERROR) << "Receive stream with SSRC '" << ssrc
+ << "' already exists.";
+ return false;
+ }
+ }
+ return true;
+}
+
+// Creates a send stream for |sp|: validates the params and SSRC
+// availability, reserves the SSRCs, constructs the WebRtcVideoSendStream
+// with the channel's current codec/extension/bitrate state, and — if this
+// is the first send stream — promotes its primary SSRC to the local SSRC
+// used in receive streams' RTCP receiver reports.
+bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) {
+ RTC_LOG(LS_INFO) << "AddSendStream: " << sp.ToString();
+ if (!ValidateStreamParams(sp))
+ return false;
+
+ rtc::CritScope stream_lock(&stream_crit_);
+
+ if (!ValidateSendSsrcAvailability(sp))
+ return false;
+
+ for (uint32_t used_ssrc : sp.ssrcs)
+ send_ssrcs_.insert(used_ssrc);
+
+ webrtc::VideoSendStream::Config config(this);
+ config.suspend_below_min_bitrate = video_config_.suspend_below_min_bitrate;
+ config.periodic_alr_bandwidth_probing =
+ video_config_.periodic_alr_bandwidth_probing;
+ WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
+ call_, sp, std::move(config), default_send_options_, encoder_factory_,
+ video_config_.enable_cpu_overuse_detection,
+ bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_,
+ send_params_);
+
+ uint32_t ssrc = sp.first_ssrc();
+ RTC_DCHECK(ssrc != 0);
+ send_streams_[ssrc] = stream;
+
+ // First real send stream: replace the placeholder RTCP report SSRC.
+ if (rtcp_receiver_report_ssrc_ == kDefaultRtcpReceiverReportSsrc) {
+ rtcp_receiver_report_ssrc_ = ssrc;
+ RTC_LOG(LS_INFO)
+ << "SetLocalSsrc on all the receive streams because we added "
+ "a send stream.";
+ for (auto& kv : receive_streams_)
+ kv.second->SetLocalSsrc(ssrc);
+ }
+ // New streams inherit the channel's current sending state.
+ if (sending_) {
+ stream->SetSend(true);
+ }
+
+ return true;
+}
+
+// Removes the send stream keyed by |ssrc|: releases its reserved SSRCs and,
+// if its SSRC was serving as the local RTCP report SSRC, switches receive
+// streams to another send stream's SSRC (or back to the placeholder).
+// The stream object is deleted outside the lock.
+bool WebRtcVideoChannel::RemoveSendStream(uint32_t ssrc) {
+ RTC_LOG(LS_INFO) << "RemoveSendStream: " << ssrc;
+
+ WebRtcVideoSendStream* removed_stream;
+ {
+ rtc::CritScope stream_lock(&stream_crit_);
+ std::map<uint32_t, WebRtcVideoSendStream*>::iterator it =
+ send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ return false;
+ }
+
+ for (uint32_t old_ssrc : it->second->GetSsrcs())
+ send_ssrcs_.erase(old_ssrc);
+
+ removed_stream = it->second;
+ send_streams_.erase(it);
+
+ // Switch receiver report SSRCs, the one in use is no longer valid.
+ if (rtcp_receiver_report_ssrc_ == ssrc) {
+ rtcp_receiver_report_ssrc_ = send_streams_.empty()
+ ? kDefaultRtcpReceiverReportSsrc
+ : send_streams_.begin()->first;
+ RTC_LOG(LS_INFO) << "SetLocalSsrc on all the receive streams because the "
+ "previous local SSRC was removed.";
+
+ for (auto& kv : receive_streams_) {
+ kv.second->SetLocalSsrc(rtcp_receiver_report_ssrc_);
+ }
+ }
+ }
+
+ // Deleted outside the lock to avoid holding stream_crit_ during teardown.
+ delete removed_stream;
+
+ return true;
+}
+
+// Releases the SSRCs reserved by |stream| and deletes it. The caller is
+// responsible for removing it from receive_streams_ and holding stream_crit_.
+void WebRtcVideoChannel::DeleteReceiveStream(
+ WebRtcVideoChannel::WebRtcVideoReceiveStream* stream) {
+ for (uint32_t old_ssrc : stream->GetSsrcs())
+ receive_ssrcs_.erase(old_ssrc);
+ delete stream;
+}
+
+// Public overload: adds a signalled (non-default) receive stream.
+bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp) {
+ return AddRecvStream(sp, false);
+}
+
+// Creates a receive stream for |sp|. |default_stream| marks streams created
+// for unsignalled SSRCs; an existing default stream on the same SSRC is
+// silently replaced, while clashing with a signalled stream is an error.
+bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp,
+ bool default_stream) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ RTC_LOG(LS_INFO) << "AddRecvStream"
+ << (default_stream ? " (default stream)" : "") << ": "
+ << sp.ToString();
+ if (!ValidateStreamParams(sp))
+ return false;
+
+ uint32_t ssrc = sp.first_ssrc();
+ RTC_DCHECK(ssrc != 0); // TODO(pbos): Is this ever valid?
+
+ rtc::CritScope stream_lock(&stream_crit_);
+ // Remove running stream if this was a default stream.
+ const auto& prev_stream = receive_streams_.find(ssrc);
+ if (prev_stream != receive_streams_.end()) {
+ if (default_stream || !prev_stream->second->IsDefaultStream()) {
+ RTC_LOG(LS_ERROR) << "Receive stream for SSRC '" << ssrc
+ << "' already exists.";
+ return false;
+ }
+ DeleteReceiveStream(prev_stream->second);
+ receive_streams_.erase(prev_stream);
+ }
+
+ if (!ValidateReceiveSsrcAvailability(sp))
+ return false;
+
+ for (uint32_t used_ssrc : sp.ssrcs)
+ receive_ssrcs_.insert(used_ssrc);
+
+ webrtc::VideoReceiveStream::Config config(this);
+ webrtc::FlexfecReceiveStream::Config flexfec_config(this);
+ ConfigureReceiverRtp(&config, &flexfec_config, sp);
+
+ config.disable_prerenderer_smoothing =
+ video_config_.disable_prerenderer_smoothing;
+ config.sync_group = sp.sync_label;
+
+ receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
+ call_, sp, std::move(config), decoder_factory_, default_stream,
+ recv_codecs_, flexfec_config);
+
+ return true;
+}
+
+// Fills the receive-stream and FlexFEC RTP configs for |sp|: remote/local
+// SSRCs (avoiding a local==remote clash), RTCP mode from the send params,
+// REMB/transport-cc from the send codec, RTX SSRC, header extensions, and —
+// only when the FlexFEC-advertised field trial is on and |sp| carries a
+// FEC-FR SSRC — the FlexFEC stream parameters.
+void WebRtcVideoChannel::ConfigureReceiverRtp(
+ webrtc::VideoReceiveStream::Config* config,
+ webrtc::FlexfecReceiveStream::Config* flexfec_config,
+ const StreamParams& sp) const {
+ uint32_t ssrc = sp.first_ssrc();
+
+ config->rtp.remote_ssrc = ssrc;
+ config->rtp.local_ssrc = rtcp_receiver_report_ssrc_;
+
+ // TODO(pbos): This protection is against setting the same local ssrc as
+ // remote which is not permitted by the lower-level API. RTCP requires a
+ // corresponding sender SSRC. Figure out what to do when we don't have
+ // (receive-only) or know a good local SSRC.
+ if (config->rtp.remote_ssrc == config->rtp.local_ssrc) {
+ if (config->rtp.local_ssrc != kDefaultRtcpReceiverReportSsrc) {
+ config->rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc;
+ } else {
+ config->rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc + 1;
+ }
+ }
+
+ // Whether or not the receive stream sends reduced size RTCP is determined
+ // by the send params.
+ // TODO(deadbeef): Once we change "send_params" to "sender_params" and
+ // "recv_params" to "receiver_params", we should get this out of
+ // receiver_params_.
+ config->rtp.rtcp_mode = send_params_.rtcp.reduced_size
+ ? webrtc::RtcpMode::kReducedSize
+ : webrtc::RtcpMode::kCompound;
+
+ config->rtp.remb = send_codec_ ? HasRemb(send_codec_->codec) : false;
+ config->rtp.transport_cc =
+ send_codec_ ? HasTransportCc(send_codec_->codec) : false;
+
+ sp.GetFidSsrc(ssrc, &config->rtp.rtx_ssrc);
+
+ config->rtp.extensions = recv_rtp_extensions_;
+
+ // TODO(brandtr): Generalize when we add support for multistream protection.
+ flexfec_config->payload_type = recv_flexfec_payload_type_;
+ if (IsFlexfecAdvertisedFieldTrialEnabled() &&
+ sp.GetFecFrSsrc(ssrc, &flexfec_config->remote_ssrc)) {
+ flexfec_config->protected_media_ssrcs = {ssrc};
+ flexfec_config->local_ssrc = config->rtp.local_ssrc;
+ flexfec_config->rtcp_mode = config->rtp.rtcp_mode;
+ // TODO(brandtr): We should be spec-compliant and set |transport_cc| here
+ // based on the rtcp-fb for the FlexFEC codec, not the media codec.
+ flexfec_config->transport_cc = config->rtp.transport_cc;
+ flexfec_config->rtp_header_extensions = config->rtp.extensions;
+ }
+}
+
+// Destroys and unregisters the receive stream associated with |ssrc|.
+// Returns false if |ssrc| is 0 (not supported) or no such stream exists.
+bool WebRtcVideoChannel::RemoveRecvStream(uint32_t ssrc) {
+  RTC_LOG(LS_INFO) << "RemoveRecvStream: " << ssrc;
+  if (ssrc == 0) {
+    RTC_LOG(LS_ERROR) << "RemoveRecvStream with 0 ssrc is not supported.";
+    return false;
+  }
+
+  // |stream_crit_| guards |receive_streams_| against concurrent access.
+  rtc::CritScope stream_lock(&stream_crit_);
+  std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator stream =
+      receive_streams_.find(ssrc);
+  if (stream == receive_streams_.end()) {
+    RTC_LOG(LS_ERROR) << "Stream not found for ssrc: " << ssrc;
+    return false;
+  }
+  DeleteReceiveStream(stream->second);
+  receive_streams_.erase(stream);
+
+  return true;
+}
+
+// Attaches (or detaches, if |sink| is null) a video frame sink to the
+// receive stream with |ssrc|. An |ssrc| of 0 targets the default sink used
+// for unsignalled streams. Returns false if no stream matches |ssrc|.
+bool WebRtcVideoChannel::SetSink(
+    uint32_t ssrc,
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+  RTC_LOG(LS_INFO) << "SetSink: ssrc:" << ssrc << " "
+                   << (sink ? "(ptr)" : "nullptr");
+  if (ssrc == 0) {
+    // Do not hold |stream_crit_| here, since SetDefaultSink will call
+    // WebRtcVideoChannel::GetDefaultReceiveStreamSsrc().
+    default_unsignalled_ssrc_handler_.SetDefaultSink(this, sink);
+    return true;
+  }
+
+  rtc::CritScope stream_lock(&stream_crit_);
+  std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator it =
+      receive_streams_.find(ssrc);
+  if (it == receive_streams_.end()) {
+    return false;
+  }
+
+  it->second->SetSink(sink);
+  return true;
+}
+
+// Populates |info| with sender, receiver, and codec statistics for this
+// channel. Also copies the call-wide RTT into every sender entry, and logs
+// the full stats at most once per kStatsLogIntervalMs. Always returns true.
+bool WebRtcVideoChannel::GetStats(VideoMediaInfo* info) {
+  TRACE_EVENT0("webrtc", "WebRtcVideoChannel::GetStats");
+
+  // Log stats periodically.
+  bool log_stats = false;
+  int64_t now_ms = rtc::TimeMillis();
+  if (last_stats_log_ms_ == -1 ||
+      now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) {
+    last_stats_log_ms_ = now_ms;
+    log_stats = true;
+  }
+
+  info->Clear();
+  FillSenderStats(info, log_stats);
+  FillReceiverStats(info, log_stats);
+  FillSendAndReceiveCodecStats(info);
+  // TODO(holmer): We should either have rtt available as a metric on
+  // VideoSend/ReceiveStreams, or we should remove rtt from VideoSenderInfo.
+  webrtc::Call::Stats stats = call_->GetStats();
+  if (stats.rtt_ms != -1) {
+    // -1 signals that no RTT estimate is available yet.
+    for (size_t i = 0; i < info->senders.size(); ++i) {
+      info->senders[i].rtt_ms = stats.rtt_ms;
+    }
+  }
+
+  if (log_stats)
+    RTC_LOG(LS_INFO) << stats.ToString(now_ms);
+
+  return true;
+}
+
+// Appends one VideoSenderInfo per send stream to |video_media_info|.
+// |log_stats| is forwarded so each stream can log its own stats.
+void WebRtcVideoChannel::FillSenderStats(VideoMediaInfo* video_media_info,
+                                         bool log_stats) {
+  rtc::CritScope stream_lock(&stream_crit_);
+  for (std::map<uint32_t, WebRtcVideoSendStream*>::iterator it =
+           send_streams_.begin();
+       it != send_streams_.end(); ++it) {
+    video_media_info->senders.push_back(
+        it->second->GetVideoSenderInfo(log_stats));
+  }
+}
+
+// Appends one VideoReceiverInfo per receive stream to |video_media_info|.
+// |log_stats| is forwarded so each stream can log its own stats.
+void WebRtcVideoChannel::FillReceiverStats(VideoMediaInfo* video_media_info,
+                                           bool log_stats) {
+  rtc::CritScope stream_lock(&stream_crit_);
+  for (std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator it =
+           receive_streams_.begin();
+       it != receive_streams_.end(); ++it) {
+    video_media_info->receivers.push_back(
+        it->second->GetVideoReceiverInfo(log_stats));
+  }
+}
+
+// Accumulates bandwidth-estimation info (bitrates) from every send stream
+// into |bwe_info|.
+void WebRtcVideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) {
+  rtc::CritScope stream_lock(&stream_crit_);
+  for (std::map<uint32_t, WebRtcVideoSendStream*>::iterator stream =
+           send_streams_.begin();
+       stream != send_streams_.end(); ++stream) {
+    stream->second->FillBitrateInfo(bwe_info);
+  }
+}
+
+// Copies the negotiated send and receive codecs into |video_media_info|,
+// keyed by payload type.
+void WebRtcVideoChannel::FillSendAndReceiveCodecStats(
+    VideoMediaInfo* video_media_info) {
+  for (const VideoCodec& codec : send_params_.codecs) {
+    webrtc::RtpCodecParameters codec_params = codec.ToCodecParameters();
+    video_media_info->send_codecs.insert(
+        std::make_pair(codec_params.payload_type, std::move(codec_params)));
+  }
+  for (const VideoCodec& codec : recv_params_.codecs) {
+    webrtc::RtpCodecParameters codec_params = codec.ToCodecParameters();
+    video_media_info->receive_codecs.insert(
+        std::make_pair(codec_params.payload_type, std::move(codec_params)));
+  }
+}
+
+// Handles an incoming RTP packet. Delivers it to the call; if the SSRC is
+// unknown, consults |unsignalled_ssrc_handler_| (which may create a default
+// receive stream) and then re-delivers. Packets whose payload type matches a
+// configured RTX/FEC payload are dropped rather than used to create a stream.
+void WebRtcVideoChannel::OnPacketReceived(
+    rtc::CopyOnWriteBuffer* packet,
+    const rtc::PacketTime& packet_time) {
+  const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
+                                              packet_time.not_before);
+  const webrtc::PacketReceiver::DeliveryStatus delivery_result =
+      call_->Receiver()->DeliverPacket(
+          webrtc::MediaType::VIDEO,
+          packet->cdata(), packet->size(),
+          webrtc_packet_time);
+  switch (delivery_result) {
+    case webrtc::PacketReceiver::DELIVERY_OK:
+      return;
+    case webrtc::PacketReceiver::DELIVERY_PACKET_ERROR:
+      // Malformed packet; nothing more to do.
+      return;
+    case webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC:
+      // Fall through to the unsignalled-SSRC handling below.
+      break;
+  }
+
+  uint32_t ssrc = 0;
+  if (!GetRtpSsrc(packet->cdata(), packet->size(), &ssrc)) {
+    return;
+  }
+
+  int payload_type = 0;
+  if (!GetRtpPayloadType(packet->cdata(), packet->size(), &payload_type)) {
+    return;
+  }
+
+  // See if this payload_type is registered as one that usually gets its own
+  // SSRC (RTX) or at least is safe to drop either way (FEC). If it is, and
+  // it wasn't handled above by DeliverPacket, that means we don't know what
+  // stream it associates with, and we shouldn't ever create an implicit channel
+  // for these.
+  for (auto& codec : recv_codecs_) {
+    if (payload_type == codec.rtx_payload_type ||
+        payload_type == codec.ulpfec.red_rtx_payload_type ||
+        payload_type == codec.ulpfec.ulpfec_payload_type) {
+      return;
+    }
+  }
+  if (payload_type == recv_flexfec_payload_type_) {
+    return;
+  }
+
+  switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) {
+    case UnsignalledSsrcHandler::kDropPacket:
+      return;
+    case UnsignalledSsrcHandler::kDeliverPacket:
+      break;
+  }
+
+  // Second delivery attempt, after the handler had a chance to create a
+  // default receive stream for |ssrc|.
+  if (call_->Receiver()->DeliverPacket(
+          webrtc::MediaType::VIDEO,
+          packet->cdata(), packet->size(),
+          webrtc_packet_time) != webrtc::PacketReceiver::DELIVERY_OK) {
+    RTC_LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery.";
+    return;
+  }
+}
+
+// Forwards an incoming RTCP packet to the call. The delivery result is
+// intentionally ignored (see TODO below).
+void WebRtcVideoChannel::OnRtcpReceived(
+    rtc::CopyOnWriteBuffer* packet,
+    const rtc::PacketTime& packet_time) {
+  const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
+                                              packet_time.not_before);
+  // TODO(pbos): Check webrtc::PacketReceiver::DELIVERY_OK once we deliver
+  // for both audio and video on the same path. Since BundleFilter doesn't
+  // filter RTCP anymore incoming RTCP packets could've been going to audio (so
+  // logging failures spam the log).
+  call_->Receiver()->DeliverPacket(
+      webrtc::MediaType::VIDEO,
+      packet->cdata(), packet->size(),
+      webrtc_packet_time);
+}
+
+// Propagates the transport's readiness to the call as a video network state
+// change (up when |ready| is true, down otherwise).
+void WebRtcVideoChannel::OnReadyToSend(bool ready) {
+  RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready.");
+  call_->SignalChannelNetworkState(
+      webrtc::MediaType::VIDEO,
+      ready ? webrtc::kNetworkUp : webrtc::kNetworkDown);
+}
+
+// Notifies the call of a changed network route and of the route's new
+// per-packet transport overhead.
+void WebRtcVideoChannel::OnNetworkRouteChanged(
+    const std::string& transport_name,
+    const rtc::NetworkRoute& network_route) {
+  // TODO(zhihaung): Merge these two callbacks.
+  call_->OnNetworkRouteChanged(transport_name, network_route);
+  call_->OnTransportOverheadChanged(webrtc::MediaType::VIDEO,
+                                    network_route.packet_overhead);
+}
+
+// Installs the network interface and enlarges both the RTP receive and send
+// socket buffers to kVideoRtpBufferSize.
+void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
+  MediaChannel::SetInterface(iface);
+  // Set the RTP recv/send buffer to a bigger size
+  MediaChannel::SetOption(NetworkInterface::ST_RTP,
+                          rtc::Socket::OPT_RCVBUF,
+                          kVideoRtpBufferSize);
+
+  // Speculative change to increase the outbound socket buffer size.
+  // In b/15152257, we are seeing a significant number of packets discarded
+  // due to lack of socket buffer space, although it's not yet clear what the
+  // ideal value should be.
+  MediaChannel::SetOption(NetworkInterface::ST_RTP,
+                          rtc::Socket::OPT_SNDBUF,
+                          kVideoRtpBufferSize);
+}
+
+// Returns the SSRC of the first receive stream that was created as a default
+// (unsignalled) stream, or an empty optional if there is none.
+rtc::Optional<uint32_t> WebRtcVideoChannel::GetDefaultReceiveStreamSsrc() {
+  rtc::CritScope stream_lock(&stream_crit_);
+  rtc::Optional<uint32_t> ssrc;
+  for (auto it = receive_streams_.begin(); it != receive_streams_.end(); ++it) {
+    if (it->second->IsDefaultStream()) {
+      ssrc.emplace(it->first);
+      break;
+    }
+  }
+  return ssrc;
+}
+
+// Transport callback: wraps |data| in a CopyOnWriteBuffer (capped at
+// kMaxRtpPacketLen) and sends it as an RTP packet, carrying over the
+// packet id from |options|.
+bool WebRtcVideoChannel::SendRtp(const uint8_t* data,
+                                 size_t len,
+                                 const webrtc::PacketOptions& options) {
+  rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
+  rtc::PacketOptions rtc_options;
+  rtc_options.packet_id = options.packet_id;
+  return MediaChannel::SendPacket(&packet, rtc_options);
+}
+
+// Transport callback: sends |data| as an RTCP packet with default options.
+bool WebRtcVideoChannel::SendRtcp(const uint8_t* data, size_t len) {
+  rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
+  return MediaChannel::SendRtcp(&packet, rtc::PacketOptions());
+}
+
+// Bundles the construction-time parameters of a send stream. Takes ownership
+// of |config| by move; |conference_mode| starts false and is set later from
+// the send parameters.
+WebRtcVideoChannel::WebRtcVideoSendStream::VideoSendStreamParameters::
+    VideoSendStreamParameters(
+        webrtc::VideoSendStream::Config config,
+        const VideoOptions& options,
+        int max_bitrate_bps,
+        const rtc::Optional<VideoCodecSettings>& codec_settings)
+    : config(std::move(config)),
+      options(options),
+      max_bitrate_bps(max_bitrate_bps),
+      conference_mode(false),
+      codec_settings(codec_settings) {}
+
+// Constructs a send stream wrapper: records SSRCs (primary, RTX, and — when
+// the field trial is on — a single FlexFEC stream), copies RTP settings from
+// |send_params|/|rtp_extensions|, and, if |codec_settings| is present,
+// immediately configures the codec (which also creates the underlying
+// webrtc::VideoSendStream).
+WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream(
+    webrtc::Call* call,
+    const StreamParams& sp,
+    webrtc::VideoSendStream::Config config,
+    const VideoOptions& options,
+    webrtc::VideoEncoderFactory* encoder_factory,
+    bool enable_cpu_overuse_detection,
+    int max_bitrate_bps,
+    const rtc::Optional<VideoCodecSettings>& codec_settings,
+    const rtc::Optional<std::vector<webrtc::RtpExtension>>& rtp_extensions,
+    // TODO(deadbeef): Don't duplicate information between send_params,
+    // rtp_extensions, options, etc.
+    const VideoSendParameters& send_params)
+    : worker_thread_(rtc::Thread::Current()),
+      ssrcs_(sp.ssrcs),
+      ssrc_groups_(sp.ssrc_groups),
+      call_(call),
+      enable_cpu_overuse_detection_(enable_cpu_overuse_detection),
+      source_(nullptr),
+      encoder_factory_(encoder_factory),
+      stream_(nullptr),
+      encoder_sink_(nullptr),
+      parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
+      rtp_parameters_(CreateRtpParametersWithOneEncoding()),
+      sending_(false) {
+  parameters_.config.rtp.max_packet_size = kVideoMtu;
+  parameters_.conference_mode = send_params.conference_mode;
+
+  sp.GetPrimarySsrcs(&parameters_.config.rtp.ssrcs);
+
+  // ValidateStreamParams should prevent this from happening.
+  RTC_CHECK(!parameters_.config.rtp.ssrcs.empty());
+  rtp_parameters_.encodings[0].ssrc = parameters_.config.rtp.ssrcs[0];
+
+  // RTX.
+  sp.GetFidSsrcs(parameters_.config.rtp.ssrcs,
+                 &parameters_.config.rtp.rtx.ssrcs);
+
+  // FlexFEC SSRCs.
+  // TODO(brandtr): This code needs to be generalized when we add support for
+  // multistream protection.
+  if (IsFlexfecFieldTrialEnabled()) {
+    uint32_t flexfec_ssrc;
+    bool flexfec_enabled = false;
+    for (uint32_t primary_ssrc : parameters_.config.rtp.ssrcs) {
+      if (sp.GetFecFrSsrc(primary_ssrc, &flexfec_ssrc)) {
+        // Only the first FlexFEC mapping is honored; later ones are logged
+        // and skipped.
+        if (flexfec_enabled) {
+          RTC_LOG(LS_INFO)
+              << "Multiple FlexFEC streams in local SDP, but "
+                 "our implementation only supports a single FlexFEC "
+                 "stream. Will not enable FlexFEC for proposed "
+                 "stream with SSRC: "
+              << flexfec_ssrc << ".";
+          continue;
+        }
+
+        flexfec_enabled = true;
+        parameters_.config.rtp.flexfec.ssrc = flexfec_ssrc;
+        parameters_.config.rtp.flexfec.protected_media_ssrcs = {primary_ssrc};
+      }
+    }
+  }
+
+  parameters_.config.rtp.c_name = sp.cname;
+  parameters_.config.track_id = sp.id;
+  if (rtp_extensions) {
+    parameters_.config.rtp.extensions = *rtp_extensions;
+  }
+  parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
+                                         ? webrtc::RtcpMode::kReducedSize
+                                         : webrtc::RtcpMode::kCompound;
+  if (codec_settings) {
+    bool force_encoder_allocation = false;
+    SetCodec(*codec_settings, force_encoder_allocation);
+  }
+}
+
+// Destroys the underlying webrtc::VideoSendStream (if created) before
+// releasing the encoder it references.
+WebRtcVideoChannel::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
+  if (stream_ != NULL) {
+    call_->DestroyVideoSendStream(stream_);
+  }
+  // Release |allocated_encoder_|.
+  allocated_encoder_.reset();
+}
+
+// Applies new |options| (only when |enable| is true) and swaps the frame
+// |source|. A change of the screencast option recreates the codec instance;
+// any other option change reconfigures the encoder. Always returns true.
+bool WebRtcVideoChannel::WebRtcVideoSendStream::SetVideoSend(
+    bool enable,
+    const VideoOptions* options,
+    rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
+  TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetVideoSend");
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+
+  // Ignore |options| pointer if |enable| is false.
+  bool options_present = enable && options;
+
+  if (options_present) {
+    VideoOptions old_options = parameters_.options;
+    parameters_.options.SetAll(*options);
+    if (parameters_.options.is_screencast.value_or(false) !=
+            old_options.is_screencast.value_or(false) &&
+        parameters_.codec_settings) {
+      // If screen content settings change, we may need to recreate the codec
+      // instance so that the correct type is used.
+
+      bool force_encoder_allocation = true;
+      SetCodec(*parameters_.codec_settings, force_encoder_allocation);
+      // Mark screenshare parameter as being updated, then test for any other
+      // changes that may require codec reconfiguration.
+      old_options.is_screencast = options->is_screencast;
+    }
+    if (parameters_.options != old_options) {
+      ReconfigureEncoder();
+    }
+  }
+
+  // Detach the old source before attaching the new one.
+  if (source_ && stream_) {
+    stream_->SetSource(nullptr, DegradationPreference::kDegradationDisabled);
+  }
+  // Switch to the new source.
+  source_ = source;
+  if (source && stream_) {
+    stream_->SetSource(this, GetDegradationPreference());
+  }
+  return true;
+}
+
+// Chooses how the stream degrades under load: disabled when CPU overuse
+// detection is off; maintain-resolution for screencast; balanced when the
+// "WebRTC-Video-BalancedDegradation" field trial is enabled; otherwise
+// maintain-framerate.
+webrtc::VideoSendStream::DegradationPreference
+WebRtcVideoChannel::WebRtcVideoSendStream::GetDegradationPreference() const {
+  // Do not adapt resolution for screen content as this will likely
+  // result in blurry and unreadable text.
+  // |this| acts like a VideoSource to make sure SinkWants are handled on the
+  // correct thread.
+  DegradationPreference degradation_preference;
+  if (!enable_cpu_overuse_detection_) {
+    degradation_preference = DegradationPreference::kDegradationDisabled;
+  } else {
+    if (parameters_.options.is_screencast.value_or(false)) {
+      degradation_preference = DegradationPreference::kMaintainResolution;
+    } else if (webrtc::field_trial::IsEnabled(
+                   "WebRTC-Video-BalancedDegradation")) {
+      degradation_preference = DegradationPreference::kBalanced;
+    } else {
+      degradation_preference = DegradationPreference::kMaintainFramerate;
+    }
+  }
+  return degradation_preference;
+}
+
+// Returns all SSRCs this send stream was created with.
+const std::vector<uint32_t>&
+WebRtcVideoChannel::WebRtcVideoSendStream::GetSsrcs() const {
+  return ssrcs_;
+}
+
+// Applies |codec_settings|: builds a fresh encoder config, allocates a new
+// encoder via the factory when the codec changed (or when
+// |force_encoder_allocation| is set), wires up FEC/RTX/NACK parameters, and
+// recreates the underlying webrtc stream. The previous encoder is released
+// only after the stream recreation.
+void WebRtcVideoChannel::WebRtcVideoSendStream::SetCodec(
+    const VideoCodecSettings& codec_settings,
+    bool force_encoder_allocation) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  parameters_.encoder_config = CreateVideoEncoderConfig(codec_settings.codec);
+  RTC_DCHECK_GT(parameters_.encoder_config.number_of_streams, 0);
+
+  // Do not re-create encoders of the same type. We can't overwrite
+  // |allocated_encoder_| immediately, because we need to release it after the
+  // RecreateWebRtcStream() call.
+  std::unique_ptr<webrtc::VideoEncoder> new_encoder;
+  if (force_encoder_allocation || !allocated_encoder_ ||
+      allocated_codec_ != codec_settings.codec) {
+    const webrtc::SdpVideoFormat format(codec_settings.codec.name,
+                                        codec_settings.codec.params);
+    new_encoder = encoder_factory_->CreateVideoEncoder(format);
+
+    parameters_.config.encoder_settings.encoder = new_encoder.get();
+
+    const webrtc::VideoEncoderFactory::CodecInfo info =
+        encoder_factory_->QueryVideoEncoder(format);
+    parameters_.config.encoder_settings.full_overuse_time =
+        info.is_hardware_accelerated;
+    parameters_.config.encoder_settings.internal_source =
+        info.has_internal_source;
+  } else {
+    // Same codec as before: keep using the existing encoder instance.
+    new_encoder = std::move(allocated_encoder_);
+  }
+  parameters_.config.encoder_settings.payload_name = codec_settings.codec.name;
+  parameters_.config.encoder_settings.payload_type = codec_settings.codec.id;
+  parameters_.config.rtp.ulpfec = codec_settings.ulpfec;
+  parameters_.config.rtp.flexfec.payload_type =
+      codec_settings.flexfec_payload_type;
+
+  // Set RTX payload type if RTX is enabled.
+  if (!parameters_.config.rtp.rtx.ssrcs.empty()) {
+    if (codec_settings.rtx_payload_type == -1) {
+      RTC_LOG(LS_WARNING)
+          << "RTX SSRCs configured but there's no configured RTX "
+             "payload type. Ignoring.";
+      parameters_.config.rtp.rtx.ssrcs.clear();
+    } else {
+      parameters_.config.rtp.rtx.payload_type = codec_settings.rtx_payload_type;
+    }
+  }
+
+  parameters_.config.rtp.nack.rtp_history_ms =
+      HasNack(codec_settings.codec) ? kNackHistoryMs : 0;
+
+  parameters_.codec_settings = codec_settings;
+
+  RTC_LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetCodec.";
+  RecreateWebRtcStream();
+  allocated_encoder_ = std::move(new_encoder);
+  allocated_codec_ = codec_settings.codec;
+}
+
+// Applies changed send parameters. RTCP mode and header extension changes
+// require the stream to be recreated; a new max bandwidth only reconfigures
+// the encoder. A codec change (or a conference-mode change with an existing
+// codec) goes through SetCodec, which itself recreates the stream.
+void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters(
+    const ChangedSendParameters& params) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  // |recreate_stream| means construction-time parameters have changed and the
+  // sending stream needs to be reset with the new config.
+  bool recreate_stream = false;
+  if (params.rtcp_mode) {
+    parameters_.config.rtp.rtcp_mode = *params.rtcp_mode;
+    recreate_stream = true;
+  }
+  if (params.rtp_header_extensions) {
+    parameters_.config.rtp.extensions = *params.rtp_header_extensions;
+    recreate_stream = true;
+  }
+  if (params.max_bandwidth_bps) {
+    parameters_.max_bitrate_bps = *params.max_bandwidth_bps;
+    ReconfigureEncoder();
+  }
+  if (params.conference_mode) {
+    parameters_.conference_mode = *params.conference_mode;
+  }
+
+  // Set codecs and options.
+  if (params.codec) {
+    bool force_encoder_allocation = false;
+    SetCodec(*params.codec, force_encoder_allocation);
+    recreate_stream = false;  // SetCodec has already recreated the stream.
+  } else if (params.conference_mode && parameters_.codec_settings) {
+    bool force_encoder_allocation = false;
+    SetCodec(*parameters_.codec_settings, force_encoder_allocation);
+    recreate_stream = false;  // SetCodec has already recreated the stream.
+  }
+  if (recreate_stream) {
+    RTC_LOG(LS_INFO)
+        << "RecreateWebRtcStream (send) because of SetSendParameters";
+    RecreateWebRtcStream();
+  }
+}
+
+// Replaces the stored RtpParameters after validation. Reconfigures the
+// encoder if the per-encoding max bitrate changed, and refreshes the send
+// state since the encoding's active flag may have toggled. Returns false on
+// invalid parameters.
+bool WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters(
+    const webrtc::RtpParameters& new_parameters) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  if (!ValidateRtpParameters(new_parameters)) {
+    return false;
+  }
+
+  bool reconfigure_encoder = new_parameters.encodings[0].max_bitrate_bps !=
+                             rtp_parameters_.encodings[0].max_bitrate_bps;
+  rtp_parameters_ = new_parameters;
+  // Codecs are currently handled at the WebRtcVideoChannel level.
+  rtp_parameters_.codecs.clear();
+  if (reconfigure_encoder) {
+    ReconfigureEncoder();
+  }
+  // Encoding may have been activated/deactivated.
+  UpdateSendState();
+  return true;
+}
+
+// Returns a copy of the currently applied RtpParameters.
+webrtc::RtpParameters
+WebRtcVideoChannel::WebRtcVideoSendStream::GetRtpParameters() const {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  return rtp_parameters_;
+}
+
+// Checks that |rtp_parameters| has exactly one encoding and does not attempt
+// to change the encoding's SSRC. Logs and returns false otherwise.
+bool WebRtcVideoChannel::WebRtcVideoSendStream::ValidateRtpParameters(
+    const webrtc::RtpParameters& rtp_parameters) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  if (rtp_parameters.encodings.size() != 1) {
+    RTC_LOG(LS_ERROR)
+        << "Attempted to set RtpParameters without exactly one encoding";
+    return false;
+  }
+  if (rtp_parameters.encodings[0].ssrc != rtp_parameters_.encodings[0].ssrc) {
+    RTC_LOG(LS_ERROR) << "Attempted to set RtpParameters with modified SSRC";
+    return false;
+  }
+  return true;
+}
+
+// Starts the underlying stream when both the channel-level |sending_| flag
+// and the encoding's active flag are set; stops it otherwise.
+void WebRtcVideoChannel::WebRtcVideoSendStream::UpdateSendState() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  // TODO(deadbeef): Need to handle more than one encoding in the future.
+  RTC_DCHECK(rtp_parameters_.encodings.size() == 1u);
+  if (sending_ && rtp_parameters_.encodings[0].active) {
+    RTC_DCHECK(stream_ != nullptr);
+    stream_->Start();
+  } else {
+    if (stream_ != nullptr) {
+      stream_->Stop();
+    }
+  }
+}
+
+// Builds a webrtc::VideoEncoderConfig for |codec|: content type and minimum
+// transmit bitrate from the screencast option, stream count from the
+// negotiated SSRCs (reduced to one for simulcast-blacklisted codecs or
+// non-simulcast screencast), and a max bitrate combining the stream limit,
+// the per-encoding limit, and the codec's own max-bitrate parameter.
+webrtc::VideoEncoderConfig
+WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
+    const VideoCodec& codec) const {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  webrtc::VideoEncoderConfig encoder_config;
+  bool is_screencast = parameters_.options.is_screencast.value_or(false);
+  if (is_screencast) {
+    encoder_config.min_transmit_bitrate_bps =
+        1000 * parameters_.options.screencast_min_bitrate_kbps.value_or(0);
+    encoder_config.content_type =
+        webrtc::VideoEncoderConfig::ContentType::kScreen;
+  } else {
+    encoder_config.min_transmit_bitrate_bps = 0;
+    encoder_config.content_type =
+        webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
+  }
+
+  // By default, the stream count for the codec configuration should match the
+  // number of negotiated ssrcs. But if the codec is blacklisted for simulcast
+  // or a screencast (and not in simulcast screenshare experiment), only
+  // configure a single stream.
+  encoder_config.number_of_streams = parameters_.config.rtp.ssrcs.size();
+  if (IsCodecBlacklistedForSimulcast(codec.name) ||
+      (is_screencast &&
+       (!UseSimulcastScreenshare() || !parameters_.conference_mode))) {
+    encoder_config.number_of_streams = 1;
+  }
+
+  int stream_max_bitrate = parameters_.max_bitrate_bps;
+  if (rtp_parameters_.encodings[0].max_bitrate_bps) {
+    // Take the lower of the two positive limits.
+    stream_max_bitrate =
+        webrtc::MinPositive(*(rtp_parameters_.encodings[0].max_bitrate_bps),
+                            parameters_.max_bitrate_bps);
+  }
+
+  int codec_max_bitrate_kbps;
+  if (codec.GetParam(kCodecParamMaxBitrate, &codec_max_bitrate_kbps)) {
+    stream_max_bitrate = codec_max_bitrate_kbps * 1000;
+  }
+  encoder_config.max_bitrate_bps = stream_max_bitrate;
+
+  int max_qp = kDefaultQpMax;
+  codec.GetParam(kCodecParamMaxQuantization, &max_qp);
+  encoder_config.video_stream_factory =
+      new rtc::RefCountedObject<EncoderStreamFactory>(
+          codec.name, max_qp, kDefaultVideoMaxFramerate, is_screencast,
+          parameters_.conference_mode);
+  return encoder_config;
+}
+
+// Rebuilds the encoder config from the current codec settings and pushes it
+// to the live stream. No-op when the stream has not been created yet.
+void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  if (!stream_) {
+    // The webrtc::VideoSendStream |stream_| has not yet been created but other
+    // parameters has changed.
+    return;
+  }
+
+  RTC_DCHECK_GT(parameters_.encoder_config.number_of_streams, 0);
+
+  RTC_CHECK(parameters_.codec_settings);
+  VideoCodecSettings codec_settings = *parameters_.codec_settings;
+
+  webrtc::VideoEncoderConfig encoder_config =
+      CreateVideoEncoderConfig(codec_settings.codec);
+
+  encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(
+      codec_settings.codec);
+
+  stream_->ReconfigureVideoEncoder(encoder_config.Copy());
+
+  // Clear the settings pointer before storing the config; the stream has
+  // already taken a copy above.
+  encoder_config.encoder_specific_settings = NULL;
+
+  parameters_.encoder_config = std::move(encoder_config);
+}
+
+// Records the desired sending state and applies it via UpdateSendState().
+void WebRtcVideoChannel::WebRtcVideoSendStream::SetSend(bool send) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  sending_ = send;
+  UpdateSendState();
+}
+
+// Detaches the encoder |sink| (must be the currently registered one) from
+// the frame source.
+void WebRtcVideoChannel::WebRtcVideoSendStream::RemoveSink(
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  RTC_DCHECK(encoder_sink_ == sink);
+  encoder_sink_ = nullptr;
+  source_->RemoveSink(sink);
+}
+
+// Registers or updates the encoder sink on the frame source. First-time
+// registration arrives on the worker thread and is handled synchronously;
+// later updates come from the encoder task queue and are marshalled back to
+// the worker thread, re-checking that |sink| is still current there.
+void WebRtcVideoChannel::WebRtcVideoSendStream::AddOrUpdateSink(
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  if (worker_thread_ == rtc::Thread::Current()) {
+    // AddOrUpdateSink is called on |worker_thread_| if this is the first
+    // registration of |sink|.
+    RTC_DCHECK_RUN_ON(&thread_checker_);
+    encoder_sink_ = sink;
+    source_->AddOrUpdateSink(encoder_sink_, wants);
+  } else {
+    // Subsequent calls to AddOrUpdateSink will happen on the encoder task
+    // queue.
+    invoker_.AsyncInvoke<void>(
+        RTC_FROM_HERE, worker_thread_, [this, sink, wants] {
+          RTC_DCHECK_RUN_ON(&thread_checker_);
+          // |sink| may be invalidated after this task was posted since
+          // RemoveSink is called on the worker thread.
+          bool encoder_sink_valid = (sink == encoder_sink_);
+          if (source_ && encoder_sink_valid) {
+            source_->AddOrUpdateSink(encoder_sink_, wants);
+          }
+        });
+  }
+}
+
+// Builds a VideoSenderInfo snapshot for this stream: SSRCs and codec info
+// are always filled; the rest comes from the live stream's stats (returned
+// early if the stream has not been created). Per-substream counters are
+// summed, frame dimensions take the per-substream maximum, and fraction lost
+// is taken from the first substream only (see TODO).
+VideoSenderInfo WebRtcVideoChannel::WebRtcVideoSendStream::GetVideoSenderInfo(
+    bool log_stats) {
+  VideoSenderInfo info;
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
+    info.add_ssrc(ssrc);
+
+  if (parameters_.codec_settings) {
+    info.codec_name = parameters_.codec_settings->codec.name;
+    info.codec_payload_type = parameters_.codec_settings->codec.id;
+  }
+
+  if (stream_ == NULL)
+    return info;
+
+  webrtc::VideoSendStream::Stats stats = stream_->GetStats();
+
+  if (log_stats)
+    RTC_LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
+
+  info.adapt_changes = stats.number_of_cpu_adapt_changes;
+  info.adapt_reason =
+      stats.cpu_limited_resolution ? ADAPTREASON_CPU : ADAPTREASON_NONE;
+  info.has_entered_low_resolution = stats.has_entered_low_resolution;
+
+  // Get bandwidth limitation info from stream_->GetStats().
+  // Input resolution (output from video_adapter) can be further scaled down or
+  // higher video layer(s) can be dropped due to bitrate constraints.
+  // Note, adapt_changes only include changes from the video_adapter.
+  if (stats.bw_limited_resolution)
+    info.adapt_reason |= ADAPTREASON_BANDWIDTH;
+
+  info.encoder_implementation_name = stats.encoder_implementation_name;
+  info.ssrc_groups = ssrc_groups_;
+  info.framerate_input = stats.input_frame_rate;
+  info.framerate_sent = stats.encode_frame_rate;
+  info.avg_encode_ms = stats.avg_encode_time_ms;
+  info.encode_usage_percent = stats.encode_usage_percent;
+  info.frames_encoded = stats.frames_encoded;
+  info.qp_sum = stats.qp_sum;
+
+  info.nominal_bitrate = stats.media_bitrate_bps;
+  info.preferred_bitrate = stats.preferred_media_bitrate_bps;
+
+  info.content_type = stats.content_type;
+
+  info.send_frame_width = 0;
+  info.send_frame_height = 0;
+  for (std::map<uint32_t, webrtc::VideoSendStream::StreamStats>::iterator it =
+           stats.substreams.begin();
+       it != stats.substreams.end(); ++it) {
+    // TODO(pbos): Wire up additional stats, such as padding bytes.
+    webrtc::VideoSendStream::StreamStats stream_stats = it->second;
+    info.bytes_sent += stream_stats.rtp_stats.transmitted.payload_bytes +
+                       stream_stats.rtp_stats.transmitted.header_bytes +
+                       stream_stats.rtp_stats.transmitted.padding_bytes;
+    info.packets_sent += stream_stats.rtp_stats.transmitted.packets;
+    info.packets_lost += stream_stats.rtcp_stats.packets_lost;
+    if (stream_stats.width > info.send_frame_width)
+      info.send_frame_width = stream_stats.width;
+    if (stream_stats.height > info.send_frame_height)
+      info.send_frame_height = stream_stats.height;
+    info.firs_rcvd += stream_stats.rtcp_packet_type_counts.fir_packets;
+    info.nacks_rcvd += stream_stats.rtcp_packet_type_counts.nack_packets;
+    info.plis_rcvd += stream_stats.rtcp_packet_type_counts.pli_packets;
+  }
+
+  if (!stats.substreams.empty()) {
+    // TODO(pbos): Report fraction lost per SSRC.
+    webrtc::VideoSendStream::StreamStats first_stream_stats =
+        stats.substreams.begin()->second;
+    // fraction_lost is reported as a Q8 fixed-point value; convert to float.
+    info.fraction_lost =
+        static_cast<float>(first_stream_stats.rtcp_stats.fraction_lost) /
+        (1 << 8);
+  }
+
+  return info;
+}
+
+// Adds this stream's transmit/retransmit bitrates (summed over substreams)
+// and target/actual encoder bitrates into |bwe_info|. No-op when the
+// underlying stream has not been created.
+void WebRtcVideoChannel::WebRtcVideoSendStream::FillBitrateInfo(
+    BandwidthEstimationInfo* bwe_info) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  if (stream_ == NULL) {
+    return;
+  }
+  webrtc::VideoSendStream::Stats stats = stream_->GetStats();
+  for (std::map<uint32_t, webrtc::VideoSendStream::StreamStats>::iterator it =
+           stats.substreams.begin();
+       it != stats.substreams.end(); ++it) {
+    bwe_info->transmit_bitrate += it->second.total_bitrate_bps;
+    bwe_info->retransmit_bitrate += it->second.retransmit_bitrate_bps;
+  }
+  bwe_info->target_enc_bitrate += stats.target_media_bitrate_bps;
+  bwe_info->actual_enc_bitrate += stats.media_bitrate_bps;
+}
+
+// Tears down and re-creates the underlying webrtc::VideoSendStream from the
+// current parameters. RTX SSRCs without a payload type are dropped from the
+// config copy. Reattaches the source (if any) and re-applies the send state.
+void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  if (stream_ != NULL) {
+    call_->DestroyVideoSendStream(stream_);
+  }
+
+  RTC_CHECK(parameters_.codec_settings);
+  RTC_DCHECK_EQ((parameters_.encoder_config.content_type ==
+                 webrtc::VideoEncoderConfig::ContentType::kScreen),
+                parameters_.options.is_screencast.value_or(false))
+      << "encoder content type inconsistent with screencast option";
+  parameters_.encoder_config.encoder_specific_settings =
+      ConfigureVideoEncoderSettings(parameters_.codec_settings->codec);
+
+  webrtc::VideoSendStream::Config config = parameters_.config.Copy();
+  if (!config.rtp.rtx.ssrcs.empty() && config.rtp.rtx.payload_type == -1) {
+    RTC_LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX "
+                           "payload type the set codec. Ignoring RTX.";
+    config.rtp.rtx.ssrcs.clear();
+  }
+  stream_ = call_->CreateVideoSendStream(std::move(config),
+                                         parameters_.encoder_config.Copy());
+
+  // The stream took its own copy; drop our reference to the settings.
+  parameters_.encoder_config.encoder_specific_settings = NULL;
+
+  if (source_) {
+    stream_->SetSource(this, GetDegradationPreference());
+  }
+
+  // Call stream_->Start() if necessary conditions are met.
+  UpdateSendState();
+}
+
+// Constructs a receive stream wrapper: registers itself as the frame
+// renderer, configures decoders for |recv_codecs| and the FlexFEC payload
+// type, then creates the FlexFEC stream (if configured) and the video
+// stream. On construction there are no old decoders to release.
+WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
+    webrtc::Call* call,
+    const StreamParams& sp,
+    webrtc::VideoReceiveStream::Config config,
+    DecoderFactoryAdapter* decoder_factory,
+    bool default_stream,
+    const std::vector<VideoCodecSettings>& recv_codecs,
+    const webrtc::FlexfecReceiveStream::Config& flexfec_config)
+    : call_(call),
+      stream_params_(sp),
+      stream_(NULL),
+      default_stream_(default_stream),
+      config_(std::move(config)),
+      flexfec_config_(flexfec_config),
+      flexfec_stream_(nullptr),
+      decoder_factory_(decoder_factory),
+      sink_(NULL),
+      first_frame_timestamp_(-1),
+      estimated_remote_start_ntp_time_ms_(0) {
+  config_.renderer = this;
+  DecoderMap old_decoders;
+  ConfigureCodecs(recv_codecs, &old_decoders);
+  ConfigureFlexfecCodec(flexfec_config.payload_type);
+  MaybeRecreateWebRtcFlexfecStream();
+  RecreateWebRtcVideoStream();
+  RTC_DCHECK(old_decoders.empty());
+}
+
+// Dissociates and destroys the FlexFEC stream (if any), then destroys the
+// video receive stream and releases the decoders it referenced.
+WebRtcVideoChannel::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() {
+  if (flexfec_stream_) {
+    MaybeDissociateFlexfecFromVideo();
+    call_->DestroyFlexfecReceiveStream(flexfec_stream_);
+  }
+  call_->DestroyVideoReceiveStream(stream_);
+  allocated_decoders_.clear();
+}
+
+// Returns all SSRCs from the stream parameters this stream was created with.
+const std::vector<uint32_t>&
+WebRtcVideoChannel::WebRtcVideoReceiveStream::GetSsrcs() const {
+  return stream_params_.ssrcs;
+}
+
+// Returns the first primary (non-RTX/FEC) SSRC, or an empty optional when
+// the stream parameters contain none.
+rtc::Optional<uint32_t>
+WebRtcVideoChannel::WebRtcVideoReceiveStream::GetFirstPrimarySsrc() const {
+  std::vector<uint32_t> primary_ssrcs;
+  stream_params_.GetPrimarySsrcs(&primary_ssrcs);
+
+  if (primary_ssrcs.empty()) {
+    RTC_LOG(LS_WARNING)
+        << "Empty primary ssrcs vector, returning empty optional";
+    return rtc::nullopt;
+  } else {
+    return primary_ssrcs[0];
+  }
+}
+
+// Rebuilds the decoder list from |recv_codecs|, reusing existing decoder
+// instances where the SdpVideoFormat matches. Decoders no longer needed are
+// moved into |old_decoders| so the caller can release them after the stream
+// has been recreated. ULPFEC/RED and NACK settings are taken from the first
+// codec in the list.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs(
+    const std::vector<VideoCodecSettings>& recv_codecs,
+    DecoderMap* old_decoders) {
+  RTC_DCHECK(!recv_codecs.empty());
+  *old_decoders = std::move(allocated_decoders_);
+  config_.decoders.clear();
+  config_.rtp.rtx_associated_payload_types.clear();
+  for (const auto& recv_codec : recv_codecs) {
+    webrtc::SdpVideoFormat video_format(recv_codec.codec.name,
+                                        recv_codec.codec.params);
+    std::unique_ptr<webrtc::VideoDecoder> new_decoder;
+
+    // Reuse a previously allocated decoder for the same format, if any.
+    auto it = old_decoders->find(video_format);
+    if (it != old_decoders->end()) {
+      new_decoder = std::move(it->second);
+      old_decoders->erase(it);
+    }
+
+    if (!new_decoder && decoder_factory_) {
+      decoder_factory_->SetReceiveStreamId(stream_params_.id);
+      new_decoder = decoder_factory_->CreateVideoDecoder(webrtc::SdpVideoFormat(
+          recv_codec.codec.name, recv_codec.codec.params));
+    }
+
+    // If we still have no valid decoder, we have to create a "Null" decoder
+    // that ignores all calls. The reason we can get into this state is that
+    // the old decoder factory interface doesn't have a way to query supported
+    // codecs.
+    if (!new_decoder)
+      new_decoder.reset(new NullVideoDecoder());
+
+    webrtc::VideoReceiveStream::Decoder decoder;
+    decoder.decoder = new_decoder.get();
+    decoder.payload_type = recv_codec.codec.id;
+    decoder.payload_name = recv_codec.codec.name;
+    decoder.codec_params = recv_codec.codec.params;
+    config_.decoders.push_back(decoder);
+    config_.rtp.rtx_associated_payload_types[recv_codec.rtx_payload_type] =
+        recv_codec.codec.id;
+
+    const bool did_insert =
+        allocated_decoders_
+            .insert(std::make_pair(video_format, std::move(new_decoder)))
+            .second;
+    RTC_CHECK(did_insert);
+  }
+
+  const auto& codec = recv_codecs.front();
+  config_.rtp.ulpfec_payload_type = codec.ulpfec.ulpfec_payload_type;
+  config_.rtp.red_payload_type = codec.ulpfec.red_payload_type;
+
+  config_.rtp.nack.rtp_history_ms = HasNack(codec.codec) ? kNackHistoryMs : 0;
+  if (codec.ulpfec.red_rtx_payload_type != -1) {
+    config_.rtp
+        .rtx_associated_payload_types[codec.ulpfec.red_rtx_payload_type] =
+        codec.ulpfec.red_payload_type;
+  }
+}
+
+// Records the negotiated FlexFEC payload type. Takes effect the next time
+// the FlexfecReceiveStream is (re)created.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureFlexfecCodec(
+    int flexfec_payload_type) {
+  flexfec_config_.payload_type = flexfec_payload_type;
+}
+
+// Updates the local (RTCP sender) SSRC on both the video and FlexFEC receive
+// configs and recreates the underlying streams so the change takes effect.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc(
+    uint32_t local_ssrc) {
+  // TODO(pbos): Consider turning this sanity check into a RTC_DCHECK. You
+  // should not be able to create a sender with the same SSRC as a receiver, but
+  // right now this can't be done due to unittests depending on receiving what
+  // they are sending from the same MediaChannel.
+  if (local_ssrc == config_.rtp.remote_ssrc) {
+    // NOTE(review): the log text says "unchanged", but this branch actually
+    // guards against the local SSRC colliding with the remote SSRC.
+    RTC_LOG(LS_INFO) << "Ignoring call to SetLocalSsrc because parameters are "
+                        "unchanged; local_ssrc="
+                     << local_ssrc;
+    return;
+  }
+
+  config_.rtp.local_ssrc = local_ssrc;
+  flexfec_config_.local_ssrc = local_ssrc;
+  RTC_LOG(LS_INFO)
+      << "RecreateWebRtcStream (recv) because of SetLocalSsrc; local_ssrc="
+      << local_ssrc;
+  MaybeRecreateWebRtcFlexfecStream();
+  RecreateWebRtcVideoStream();
+}
+
+// Applies RTCP feedback settings (NACK, REMB, transport-cc, RTCP mode) to the
+// receive configs. No-ops when nothing changed; otherwise recreates both the
+// FlexFEC and video receive streams so the new settings take effect.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters(
+    bool nack_enabled,
+    bool remb_enabled,
+    bool transport_cc_enabled,
+    webrtc::RtcpMode rtcp_mode) {
+  int nack_history_ms = nack_enabled ? kNackHistoryMs : 0;
+  // Early out to avoid an unnecessary (and disruptive) stream recreation.
+  if (config_.rtp.nack.rtp_history_ms == nack_history_ms &&
+      config_.rtp.remb == remb_enabled &&
+      config_.rtp.transport_cc == transport_cc_enabled &&
+      config_.rtp.rtcp_mode == rtcp_mode) {
+    RTC_LOG(LS_INFO)
+        << "Ignoring call to SetFeedbackParameters because parameters are "
+           "unchanged; nack="
+        << nack_enabled << ", remb=" << remb_enabled
+        << ", transport_cc=" << transport_cc_enabled;
+    return;
+  }
+  config_.rtp.remb = remb_enabled;
+  config_.rtp.nack.rtp_history_ms = nack_history_ms;
+  config_.rtp.transport_cc = transport_cc_enabled;
+  config_.rtp.rtcp_mode = rtcp_mode;
+  // TODO(brandtr): We should be spec-compliant and set |transport_cc| here
+  // based on the rtcp-fb for the FlexFEC codec, not the media codec.
+  flexfec_config_.transport_cc = config_.rtp.transport_cc;
+  flexfec_config_.rtcp_mode = config_.rtp.rtcp_mode;
+  RTC_LOG(LS_INFO)
+      << "RecreateWebRtcStream (recv) because of SetFeedbackParameters; nack="
+      << nack_enabled << ", remb=" << remb_enabled
+      << ", transport_cc=" << transport_cc_enabled;
+  MaybeRecreateWebRtcFlexfecStream();
+  RecreateWebRtcVideoStream();
+}
+
+// Applies only the fields of |params| that are set (changed), then recreates
+// the FlexFEC and/or video receive streams as needed. |old_decoders| keeps
+// replaced decoder instances alive until after RecreateWebRtcVideoStream()
+// has run; they are destroyed when it goes out of scope at the end.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters(
+    const ChangedRecvParameters& params) {
+  bool video_needs_recreation = false;
+  bool flexfec_needs_recreation = false;
+  DecoderMap old_decoders;
+  if (params.codec_settings) {
+    ConfigureCodecs(*params.codec_settings, &old_decoders);
+    video_needs_recreation = true;
+  }
+  if (params.rtp_header_extensions) {
+    // Header extensions apply to both the media and the FlexFEC stream.
+    config_.rtp.extensions = *params.rtp_header_extensions;
+    flexfec_config_.rtp_header_extensions = *params.rtp_header_extensions;
+    video_needs_recreation = true;
+    flexfec_needs_recreation = true;
+  }
+  if (params.flexfec_payload_type) {
+    ConfigureFlexfecCodec(*params.flexfec_payload_type);
+    flexfec_needs_recreation = true;
+  }
+  if (flexfec_needs_recreation) {
+    RTC_LOG(LS_INFO) << "MaybeRecreateWebRtcFlexfecStream (recv) because of "
+                        "SetRecvParameters";
+    MaybeRecreateWebRtcFlexfecStream();
+  }
+  if (video_needs_recreation) {
+    RTC_LOG(LS_INFO)
+        << "RecreateWebRtcVideoStream (recv) because of SetRecvParameters";
+    RecreateWebRtcVideoStream();
+  }
+}
+
+// Destroys (if present) and recreates the underlying VideoReceiveStream from
+// a copy of |config_|, re-linking it with the FlexFEC stream and starting it.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::
+    RecreateWebRtcVideoStream() {
+  if (stream_) {
+    MaybeDissociateFlexfecFromVideo();
+    call_->DestroyVideoReceiveStream(stream_);
+    stream_ = nullptr;
+  }
+  webrtc::VideoReceiveStream::Config config = config_.Copy();
+  // Mark the media stream as FEC-protected only when FlexFEC is active.
+  config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr);
+  stream_ = call_->CreateVideoReceiveStream(std::move(config));
+  MaybeAssociateFlexfecWithVideo();
+  stream_->Start();
+}
+
+// Destroys (if present) the FlexfecReceiveStream and recreates it only when
+// |flexfec_config_| is fully specified and FlexFEC is enabled.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::
+    MaybeRecreateWebRtcFlexfecStream() {
+  if (flexfec_stream_) {
+    MaybeDissociateFlexfecFromVideo();
+    call_->DestroyFlexfecReceiveStream(flexfec_stream_);
+    flexfec_stream_ = nullptr;
+  }
+  if (flexfec_config_.IsCompleteAndEnabled()) {
+    flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_);
+    MaybeAssociateFlexfecWithVideo();
+  }
+}
+
+// Links the FlexFEC stream as a secondary sink of the video stream, but only
+// when both streams currently exist.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::
+    MaybeAssociateFlexfecWithVideo() {
+  if (stream_ && flexfec_stream_) {
+    stream_->AddSecondarySink(flexfec_stream_);
+  }
+}
+
+// Inverse of MaybeAssociateFlexfecWithVideo(); must be called before either
+// stream is destroyed.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::
+    MaybeDissociateFlexfecFromVideo() {
+  if (stream_ && flexfec_stream_) {
+    stream_->RemoveSecondarySink(flexfec_stream_);
+  }
+}
+
+// Decoded-frame callback. Updates the estimated remote capture-start NTP
+// time (remote NTP time minus RTP-clock elapsed time since the first frame)
+// and forwards the frame to the registered sink. Runs under |sink_lock_|
+// because it is called from a decoder/render thread.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::OnFrame(
+    const webrtc::VideoFrame& frame) {
+  rtc::CritScope crit(&sink_lock_);
+
+  if (first_frame_timestamp_ < 0)
+    first_frame_timestamp_ = frame.timestamp();
+  // Unwrap the 32-bit RTP timestamp so elapsed time survives wraparound.
+  int64_t rtp_time_elapsed_since_first_frame =
+      (timestamp_wraparound_handler_.Unwrap(frame.timestamp()) -
+       first_frame_timestamp_);
+  int64_t elapsed_time_ms = rtp_time_elapsed_since_first_frame /
+                            (cricket::kVideoCodecClockrate / 1000);
+  if (frame.ntp_time_ms() > 0)
+    estimated_remote_start_ntp_time_ms_ = frame.ntp_time_ms() - elapsed_time_ms;
+
+  if (sink_ == NULL) {
+    RTC_LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoSink.";
+    return;
+  }
+
+  sink_->OnFrame(frame);
+}
+
+// True when this stream was created for an unsignalled SSRC rather than from
+// signaled StreamParams.
+bool WebRtcVideoChannel::WebRtcVideoReceiveStream::IsDefaultStream() const {
+  return default_stream_;
+}
+
+// Replaces the frame sink; |sink| may be null to disconnect. Synchronized
+// with OnFrame() via |sink_lock_|.
+void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetSink(
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+  rtc::CritScope crit(&sink_lock_);
+  sink_ = sink;
+}
+
+// Linear search of the configured decoders for |payload_type|; returns the
+// codec (payload) name, or an empty string when the payload type is unknown.
+std::string
+WebRtcVideoChannel::WebRtcVideoReceiveStream::GetCodecNameFromPayloadType(
+    int payload_type) {
+  for (const webrtc::VideoReceiveStream::Decoder& decoder : config_.decoders) {
+    if (decoder.payload_type == payload_type) {
+      return decoder.payload_name;
+    }
+  }
+  return "";
+}
+
+// Builds a VideoReceiverInfo snapshot from the underlying stream's stats for
+// GetStats(). When |log_stats| is set, also logs the raw stats string.
+VideoReceiverInfo
+WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
+    bool log_stats) {
+  VideoReceiverInfo info;
+  info.ssrc_groups = stream_params_.ssrc_groups;
+  info.add_ssrc(config_.rtp.remote_ssrc);
+  webrtc::VideoReceiveStream::Stats stats = stream_->GetStats();
+  info.decoder_implementation_name = stats.decoder_implementation_name;
+  if (stats.current_payload_type != -1) {
+    info.codec_payload_type = stats.current_payload_type;
+  }
+  // bytes_rcvd counts the full RTP packet: payload + header + padding.
+  info.bytes_rcvd = stats.rtp_stats.transmitted.payload_bytes +
+                    stats.rtp_stats.transmitted.header_bytes +
+                    stats.rtp_stats.transmitted.padding_bytes;
+  info.packets_rcvd = stats.rtp_stats.transmitted.packets;
+  info.packets_lost = stats.rtcp_stats.packets_lost;
+  // fraction_lost is reported as an 8-bit fixed-point fraction (RFC 3550).
+  info.fraction_lost =
+      static_cast<float>(stats.rtcp_stats.fraction_lost) / (1 << 8);
+
+  info.framerate_rcvd = stats.network_frame_rate;
+  info.framerate_decoded = stats.decode_frame_rate;
+  info.framerate_output = stats.render_frame_rate;
+  info.frame_width = stats.width;
+  info.frame_height = stats.height;
+
+  {
+    // |estimated_remote_start_ntp_time_ms_| is written on the decode path
+    // (OnFrame), so read it under |sink_lock_|.
+    rtc::CritScope frame_cs(&sink_lock_);
+    info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_;
+  }
+
+  info.decode_ms = stats.decode_ms;
+  info.max_decode_ms = stats.max_decode_ms;
+  info.current_delay_ms = stats.current_delay_ms;
+  info.target_delay_ms = stats.target_delay_ms;
+  info.jitter_buffer_ms = stats.jitter_buffer_ms;
+  info.min_playout_delay_ms = stats.min_playout_delay_ms;
+  info.render_delay_ms = stats.render_delay_ms;
+  info.frames_received = stats.frame_counts.key_frames +
+                         stats.frame_counts.delta_frames;
+  info.frames_decoded = stats.frames_decoded;
+  info.frames_rendered = stats.frames_rendered;
+  info.qp_sum = stats.qp_sum;
+
+  info.interframe_delay_max_ms = stats.interframe_delay_max_ms;
+
+  info.content_type = stats.content_type;
+
+  info.codec_name = GetCodecNameFromPayloadType(stats.current_payload_type);
+
+  info.firs_sent = stats.rtcp_packet_type_counts.fir_packets;
+  info.plis_sent = stats.rtcp_packet_type_counts.pli_packets;
+  info.nacks_sent = stats.rtcp_packet_type_counts.nack_packets;
+
+  info.timing_frame_info = stats.timing_frame_info;
+
+  if (log_stats)
+    RTC_LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
+
+  return info;
+}
+
+// -1 means "no payload type negotiated" for both FlexFEC and RTX.
+WebRtcVideoChannel::VideoCodecSettings::VideoCodecSettings()
+    : flexfec_payload_type(-1), rtx_payload_type(-1) {}
+
+// Full member-wise equality, including the FlexFEC payload type.
+bool WebRtcVideoChannel::VideoCodecSettings::operator==(
+    const WebRtcVideoChannel::VideoCodecSettings& other) const {
+  return codec == other.codec && ulpfec == other.ulpfec &&
+         flexfec_payload_type == other.flexfec_payload_type &&
+         rtx_payload_type == other.rtx_payload_type;
+}
+
+// Equality ignoring |flexfec_payload_type|; used to decide whether the
+// non-FlexFEC receive configuration actually changed.
+bool WebRtcVideoChannel::VideoCodecSettings::EqualsDisregardingFlexfec(
+    const WebRtcVideoChannel::VideoCodecSettings& a,
+    const WebRtcVideoChannel::VideoCodecSettings& b) {
+  return a.codec == b.codec && a.ulpfec == b.ulpfec &&
+         a.rtx_payload_type == b.rtx_payload_type;
+}
+
+bool WebRtcVideoChannel::VideoCodecSettings::operator!=(
+    const WebRtcVideoChannel::VideoCodecSettings& other) const {
+  return !(*this == other);
+}
+
+// Folds a flat negotiated codec list into per-video-codec settings: RED,
+// ULPFEC, FlexFEC and RTX entries are absorbed into the VideoCodecSettings of
+// the regular video codecs they protect/retransmit. Returns an empty vector
+// on malformed input (duplicate payload types, dangling RTX references).
+std::vector<WebRtcVideoChannel::VideoCodecSettings>
+WebRtcVideoChannel::MapCodecs(const std::vector<VideoCodec>& codecs) {
+  RTC_DCHECK(!codecs.empty());
+
+  std::vector<VideoCodecSettings> video_codecs;
+  std::map<int, bool> payload_used;
+  std::map<int, VideoCodec::CodecType> payload_codec_type;
+  // |rtx_mapping| maps video payload type to rtx payload type.
+  std::map<int, int> rtx_mapping;
+
+  webrtc::UlpfecConfig ulpfec_config;
+  int flexfec_payload_type = -1;
+
+  // First pass: classify each codec and record FEC/RTX payload types.
+  for (size_t i = 0; i < codecs.size(); ++i) {
+    const VideoCodec& in_codec = codecs[i];
+    int payload_type = in_codec.id;
+
+    if (payload_used[payload_type]) {
+      RTC_LOG(LS_ERROR) << "Payload type already registered: "
+                        << in_codec.ToString();
+      return std::vector<VideoCodecSettings>();
+    }
+    payload_used[payload_type] = true;
+    payload_codec_type[payload_type] = in_codec.GetCodecType();
+
+    switch (in_codec.GetCodecType()) {
+      case VideoCodec::CODEC_RED: {
+        // RED payload type, should not have duplicates.
+        RTC_DCHECK_EQ(-1, ulpfec_config.red_payload_type);
+        ulpfec_config.red_payload_type = in_codec.id;
+        continue;
+      }
+
+      case VideoCodec::CODEC_ULPFEC: {
+        // ULPFEC payload type, should not have duplicates.
+        RTC_DCHECK_EQ(-1, ulpfec_config.ulpfec_payload_type);
+        ulpfec_config.ulpfec_payload_type = in_codec.id;
+        continue;
+      }
+
+      case VideoCodec::CODEC_FLEXFEC: {
+        // FlexFEC payload type, should not have duplicates.
+        RTC_DCHECK_EQ(-1, flexfec_payload_type);
+        flexfec_payload_type = in_codec.id;
+        continue;
+      }
+
+      case VideoCodec::CODEC_RTX: {
+        // RTX must carry a valid "apt" (associated payload type) parameter.
+        int associated_payload_type;
+        if (!in_codec.GetParam(kCodecParamAssociatedPayloadType,
+                               &associated_payload_type) ||
+            !IsValidRtpPayloadType(associated_payload_type)) {
+          RTC_LOG(LS_ERROR)
+              << "RTX codec with invalid or no associated payload type: "
+              << in_codec.ToString();
+          return std::vector<VideoCodecSettings>();
+        }
+        rtx_mapping[associated_payload_type] = in_codec.id;
+        continue;
+      }
+
+      case VideoCodec::CODEC_VIDEO:
+        break;
+    }
+
+    video_codecs.push_back(VideoCodecSettings());
+    video_codecs.back().codec = in_codec;
+  }
+
+  // One of these codecs should have been a video codec. Only having FEC
+  // parameters into this code is a logic error.
+  RTC_DCHECK(!video_codecs.empty());
+
+  // Second pass: validate that every RTX "apt" refers to a known video or
+  // RED payload type.
+  for (std::map<int, int>::const_iterator it = rtx_mapping.begin();
+       it != rtx_mapping.end();
+       ++it) {
+    if (!payload_used[it->first]) {
+      RTC_LOG(LS_ERROR) << "RTX mapped to payload not in codec list.";
+      return std::vector<VideoCodecSettings>();
+    }
+    if (payload_codec_type[it->first] != VideoCodec::CODEC_VIDEO &&
+        payload_codec_type[it->first] != VideoCodec::CODEC_RED) {
+      RTC_LOG(LS_ERROR)
+          << "RTX not mapped to regular video codec or RED codec.";
+      return std::vector<VideoCodecSettings>();
+    }
+
+    if (it->first == ulpfec_config.red_payload_type) {
+      ulpfec_config.red_rtx_payload_type = it->second;
+    }
+  }
+
+  // Attach the shared FEC settings and each codec's RTX payload type. Note
+  // that map::operator[] inserts a 0 entry when the codec has no RTX mapping;
+  // 0 is not a valid RTX payload type here, so the check below is safe.
+  for (size_t i = 0; i < video_codecs.size(); ++i) {
+    video_codecs[i].ulpfec = ulpfec_config;
+    video_codecs[i].flexfec_payload_type = flexfec_payload_type;
+    if (rtx_mapping[video_codecs[i].codec.id] != 0 &&
+        rtx_mapping[video_codecs[i].codec.id] !=
+            ulpfec_config.red_payload_type) {
+      video_codecs[i].rtx_payload_type = rtx_mapping[video_codecs[i].codec.id];
+    }
+  }
+
+  return video_codecs;
+}
+
+// Captures the per-send-stream parameters used later by
+// CreateEncoderStreams() to build simulcast or single-stream configs.
+EncoderStreamFactory::EncoderStreamFactory(std::string codec_name,
+                                           int max_qp,
+                                           int max_framerate,
+                                           bool is_screencast,
+                                           bool conference_mode)
+    : codec_name_(codec_name),
+      max_qp_(max_qp),
+      max_framerate_(max_framerate),
+      is_screencast_(is_screencast),
+      conference_mode_(conference_mode) {}
+
+// Produces the VideoStream layout for the encoder: a simulcast config when
+// more than one stream is requested (or for VP8 conference-mode screenshare),
+// otherwise a single stream with default bitrate bounds derived from the
+// input resolution.
+std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
+    int width,
+    int height,
+    const webrtc::VideoEncoderConfig& encoder_config) {
+  // Screenshare only uses simulcast when conference-mode simulcast
+  // screenshare is enabled.
+  if (is_screencast_ &&
+      (!conference_mode_ || !cricket::UseSimulcastScreenshare())) {
+    RTC_DCHECK_EQ(1, encoder_config.number_of_streams);
+  }
+  if (encoder_config.number_of_streams > 1 ||
+      (CodecNamesEq(codec_name_, kVp8CodecName) && is_screencast_ &&
+       conference_mode_)) {
+    return GetSimulcastConfig(encoder_config.number_of_streams, width, height,
+                              encoder_config.max_bitrate_bps, max_qp_,
+                              max_framerate_, is_screencast_);
+  }
+
+  // For unset max bitrates set default bitrate for non-simulcast.
+  int max_bitrate_bps =
+      (encoder_config.max_bitrate_bps > 0)
+          ? encoder_config.max_bitrate_bps
+          : GetMaxDefaultVideoBitrateKbps(width, height) * 1000;
+
+  webrtc::VideoStream stream;
+  stream.width = width;
+  stream.height = height;
+  stream.max_framerate = max_framerate_;
+  stream.min_bitrate_bps = GetMinVideoBitrateBps();
+  stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;
+  stream.max_qp = max_qp_;
+
+  // VP9 (non-screenshare) gets its default number of temporal layers.
+  if (CodecNamesEq(codec_name_, kVp9CodecName) && !is_screencast_) {
+    stream.temporal_layer_thresholds_bps.resize(GetDefaultVp9TemporalLayers() -
+                                                1);
+  }
+
+  std::vector<webrtc::VideoStream> streams;
+  streams.push_back(stream);
+  return streams;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.h
new file mode 100644
index 0000000000..132b5fd193
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine.h
@@ -0,0 +1,533 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVIDEOENGINE_H_
+#define MEDIA_ENGINE_WEBRTCVIDEOENGINE_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/call/transport.h"
+#include "api/optional.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "call/call.h"
+#include "call/flexfec_receive_stream.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "media/base/mediaengine.h"
+#include "media/base/videosinkinterface.h"
+#include "media/base/videosourceinterface.h"
+#include "media/engine/webrtcvideodecoderfactory.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "rtc_base/asyncinvoker.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/thread_checker.h"
+
+namespace webrtc {
+class VideoDecoder;
+class VideoDecoderFactory;
+class VideoEncoder;
+class VideoEncoderFactory;
+struct MediaConfig;
+}
+
+namespace rtc {
+class Thread;
+} // namespace rtc
+
+namespace cricket {
+
+class DecoderFactoryAdapter;
+class VideoCapturer;
+class VideoProcessor;
+class VideoRenderer;
+class VoiceMediaChannel;
+class WebRtcDecoderObserver;
+class WebRtcEncoderObserver;
+class WebRtcLocalStreamInfo;
+class WebRtcRenderAdapter;
+class WebRtcVideoChannel;
+class WebRtcVideoChannelRecvInfo;
+class WebRtcVideoChannelSendInfo;
+class WebRtcVoiceEngine;
+class WebRtcVoiceMediaChannel;
+
+// Policy interface invoked when an RTP packet arrives on an SSRC no receive
+// stream was signaled for; the implementation decides whether the packet is
+// dropped or a default receive stream is (implicitly) used to deliver it.
+class UnsignalledSsrcHandler {
+ public:
+  enum Action {
+    kDropPacket,
+    kDeliverPacket,
+  };
+  virtual Action OnUnsignalledSsrc(WebRtcVideoChannel* channel,
+                                   uint32_t ssrc) = 0;
+  virtual ~UnsignalledSsrcHandler() = default;
+};
+
+// TODO(pbos): Remove, use external handlers only.
+class DefaultUnsignalledSsrcHandler : public UnsignalledSsrcHandler {
+ public:
+ DefaultUnsignalledSsrcHandler();
+ Action OnUnsignalledSsrc(WebRtcVideoChannel* channel,
+ uint32_t ssrc) override;
+
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* GetDefaultSink() const;
+ void SetDefaultSink(WebRtcVideoChannel* channel,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
+
+ virtual ~DefaultUnsignalledSsrcHandler() = default;
+
+ private:
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* default_sink_;
+};
+
+// WebRtcVideoEngine is used for the new native WebRTC Video API (webrtc:1667).
+// Owns the codec factories and creates WebRtcVideoChannel instances bound to
+// a webrtc::Call.
+class WebRtcVideoEngine {
+ public:
+  // Internal SW video codecs will be added on top of the external codecs.
+  WebRtcVideoEngine(
+      std::unique_ptr<WebRtcVideoEncoderFactory> external_video_encoder_factory,
+      std::unique_ptr<WebRtcVideoDecoderFactory>
+          external_video_decoder_factory);
+
+  // These video codec factories represents all video codecs, i.e. both software
+  // and external hardware codecs.
+  WebRtcVideoEngine(
+      std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
+      std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory);
+
+  virtual ~WebRtcVideoEngine();
+
+  // Caller takes ownership of the returned channel.
+  WebRtcVideoChannel* CreateChannel(webrtc::Call* call,
+                                    const MediaConfig& config,
+                                    const VideoOptions& options);
+
+  std::vector<VideoCodec> codecs() const;
+  RtpCapabilities GetCapabilities() const;
+
+ private:
+  const std::unique_ptr<DecoderFactoryAdapter> decoder_factory_;
+  const std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory_;
+};
+
+// Media channel for the native WebRTC Video API. Owns per-SSRC send/receive
+// stream wrappers (WebRtcVideoSendStream / WebRtcVideoReceiveStream) and
+// implements webrtc::Transport so outgoing RTP/RTCP is routed through the
+// MediaChannel network interface.
+class WebRtcVideoChannel : public VideoMediaChannel, public webrtc::Transport {
+ public:
+  WebRtcVideoChannel(webrtc::Call* call,
+                     const MediaConfig& config,
+                     const VideoOptions& options,
+                     webrtc::VideoEncoderFactory* encoder_factory,
+                     DecoderFactoryAdapter* decoder_factory);
+  ~WebRtcVideoChannel() override;
+
+  // VideoMediaChannel implementation
+  rtc::DiffServCodePoint PreferredDscp() const override;
+
+  bool SetSendParameters(const VideoSendParameters& params) override;
+  bool SetRecvParameters(const VideoRecvParameters& params) override;
+  webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override;
+  bool SetRtpSendParameters(uint32_t ssrc,
+                            const webrtc::RtpParameters& parameters) override;
+  webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override;
+  bool SetRtpReceiveParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) override;
+  bool GetSendCodec(VideoCodec* send_codec) override;
+  bool SetSend(bool send) override;
+  bool SetVideoSend(
+      uint32_t ssrc,
+      bool enable,
+      const VideoOptions* options,
+      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
+  bool AddSendStream(const StreamParams& sp) override;
+  bool RemoveSendStream(uint32_t ssrc) override;
+  bool AddRecvStream(const StreamParams& sp) override;
+  bool AddRecvStream(const StreamParams& sp, bool default_stream);
+  bool RemoveRecvStream(uint32_t ssrc) override;
+  bool SetSink(uint32_t ssrc,
+               rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+  void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override;
+  bool GetStats(VideoMediaInfo* info) override;
+
+  void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                        const rtc::PacketTime& packet_time) override;
+  void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+                      const rtc::PacketTime& packet_time) override;
+  void OnReadyToSend(bool ready) override;
+  void OnNetworkRouteChanged(const std::string& transport_name,
+                             const rtc::NetworkRoute& network_route) override;
+  void SetInterface(NetworkInterface* iface) override;
+
+  // Implemented for VideoMediaChannelTest.
+  bool sending() const { return sending_; }
+
+  rtc::Optional<uint32_t> GetDefaultReceiveStreamSsrc();
+
+  // AdaptReason is used for expressing why a WebRtcVideoSendStream request
+  // a lower input frame size than the currently configured camera input frame
+  // size. There can be more than one reason OR:ed together.
+  enum AdaptReason {
+    ADAPTREASON_NONE = 0,
+    ADAPTREASON_CPU = 1,
+    ADAPTREASON_BANDWIDTH = 2,
+  };
+
+  static constexpr int kDefaultQpMax = 56;
+
+ private:
+  class WebRtcVideoReceiveStream;
+  // Per-video-codec receive/send settings: the codec itself plus the FEC and
+  // RTX payload types negotiated for it.
+  struct VideoCodecSettings {
+    VideoCodecSettings();
+
+    // Checks if all members of |*this| are equal to the corresponding members
+    // of |other|.
+    bool operator==(const VideoCodecSettings& other) const;
+    bool operator!=(const VideoCodecSettings& other) const;
+
+    // Checks if all members of |a|, except |flexfec_payload_type|, are equal
+    // to the corresponding members of |b|.
+    static bool EqualsDisregardingFlexfec(const VideoCodecSettings& a,
+                                          const VideoCodecSettings& b);
+
+    VideoCodec codec;
+    webrtc::UlpfecConfig ulpfec;
+    int flexfec_payload_type;
+    int rtx_payload_type;
+  };
+
+  struct ChangedSendParameters {
+    // These optionals are unset if not changed.
+    rtc::Optional<VideoCodecSettings> codec;
+    rtc::Optional<std::vector<webrtc::RtpExtension>> rtp_header_extensions;
+    rtc::Optional<int> max_bandwidth_bps;
+    rtc::Optional<bool> conference_mode;
+    rtc::Optional<webrtc::RtcpMode> rtcp_mode;
+  };
+
+  struct ChangedRecvParameters {
+    // These optionals are unset if not changed.
+    rtc::Optional<std::vector<VideoCodecSettings>> codec_settings;
+    rtc::Optional<std::vector<webrtc::RtpExtension>> rtp_header_extensions;
+    // Keep track of the FlexFEC payload type separately from |codec_settings|.
+    // This allows us to recreate the FlexfecReceiveStream separately from the
+    // VideoReceiveStream when the FlexFEC payload type is changed.
+    rtc::Optional<int> flexfec_payload_type;
+  };
+
+  bool GetChangedSendParameters(const VideoSendParameters& params,
+                                ChangedSendParameters* changed_params) const;
+  bool GetChangedRecvParameters(const VideoRecvParameters& params,
+                                ChangedRecvParameters* changed_params) const;
+
+  void SetMaxSendBandwidth(int bps);
+
+  void ConfigureReceiverRtp(
+      webrtc::VideoReceiveStream::Config* config,
+      webrtc::FlexfecReceiveStream::Config* flexfec_config,
+      const StreamParams& sp) const;
+  bool ValidateSendSsrcAvailability(const StreamParams& sp) const
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_crit_);
+  bool ValidateReceiveSsrcAvailability(const StreamParams& sp) const
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_crit_);
+  void DeleteReceiveStream(WebRtcVideoReceiveStream* stream)
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_crit_);
+
+  static std::string CodecSettingsVectorToString(
+      const std::vector<VideoCodecSettings>& codecs);
+
+  // Wrapper for the sender part.
+  class WebRtcVideoSendStream
+      : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
+   public:
+    WebRtcVideoSendStream(
+        webrtc::Call* call,
+        const StreamParams& sp,
+        webrtc::VideoSendStream::Config config,
+        const VideoOptions& options,
+        webrtc::VideoEncoderFactory* encoder_factory,
+        bool enable_cpu_overuse_detection,
+        int max_bitrate_bps,
+        const rtc::Optional<VideoCodecSettings>& codec_settings,
+        const rtc::Optional<std::vector<webrtc::RtpExtension>>& rtp_extensions,
+        const VideoSendParameters& send_params);
+    virtual ~WebRtcVideoSendStream();
+
+    void SetSendParameters(const ChangedSendParameters& send_params);
+    bool SetRtpParameters(const webrtc::RtpParameters& parameters);
+    webrtc::RtpParameters GetRtpParameters() const;
+
+    // Implements rtc::VideoSourceInterface<webrtc::VideoFrame>.
+    // WebRtcVideoSendStream acts as a source to the webrtc::VideoSendStream
+    // in |stream_|. This is done to proxy VideoSinkWants from the encoder to
+    // the worker thread.
+    void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+                         const rtc::VideoSinkWants& wants) override;
+    void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+    bool SetVideoSend(bool mute,
+                      const VideoOptions* options,
+                      rtc::VideoSourceInterface<webrtc::VideoFrame>* source);
+
+    void SetSend(bool send);
+
+    const std::vector<uint32_t>& GetSsrcs() const;
+    VideoSenderInfo GetVideoSenderInfo(bool log_stats);
+    void FillBitrateInfo(BandwidthEstimationInfo* bwe_info);
+
+   private:
+    // Parameters needed to reconstruct the underlying stream.
+    // webrtc::VideoSendStream doesn't support setting a lot of options on the
+    // fly, so when those need to be changed we tear down and reconstruct with
+    // similar parameters depending on which options changed etc.
+    struct VideoSendStreamParameters {
+      VideoSendStreamParameters(
+          webrtc::VideoSendStream::Config config,
+          const VideoOptions& options,
+          int max_bitrate_bps,
+          const rtc::Optional<VideoCodecSettings>& codec_settings);
+      webrtc::VideoSendStream::Config config;
+      VideoOptions options;
+      int max_bitrate_bps;
+      bool conference_mode;
+      rtc::Optional<VideoCodecSettings> codec_settings;
+      // Sent resolutions + bitrates etc. by the underlying VideoSendStream,
+      // typically changes when setting a new resolution or reconfiguring
+      // bitrates.
+      webrtc::VideoEncoderConfig encoder_config;
+    };
+
+    rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
+    ConfigureVideoEncoderSettings(const VideoCodec& codec);
+    void SetCodec(const VideoCodecSettings& codec,
+                  bool force_encoder_allocation);
+    void RecreateWebRtcStream();
+    webrtc::VideoEncoderConfig CreateVideoEncoderConfig(
+        const VideoCodec& codec) const;
+    void ReconfigureEncoder();
+    bool ValidateRtpParameters(const webrtc::RtpParameters& parameters);
+
+    // Calls Start or Stop according to whether or not |sending_| is true,
+    // and whether or not the encoding in |rtp_parameters_| is active.
+    void UpdateSendState();
+
+    webrtc::VideoSendStream::DegradationPreference GetDegradationPreference()
+        const RTC_EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_);
+
+    rtc::ThreadChecker thread_checker_;
+    rtc::AsyncInvoker invoker_;
+    rtc::Thread* worker_thread_;
+    const std::vector<uint32_t> ssrcs_ RTC_ACCESS_ON(&thread_checker_);
+    const std::vector<SsrcGroup> ssrc_groups_ RTC_ACCESS_ON(&thread_checker_);
+    webrtc::Call* const call_;
+    const bool enable_cpu_overuse_detection_;
+    rtc::VideoSourceInterface<webrtc::VideoFrame>* source_
+        RTC_ACCESS_ON(&thread_checker_);
+    webrtc::VideoEncoderFactory* const encoder_factory_
+        RTC_ACCESS_ON(&thread_checker_);
+
+    webrtc::VideoSendStream* stream_ RTC_ACCESS_ON(&thread_checker_);
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* encoder_sink_
+        RTC_ACCESS_ON(&thread_checker_);
+    // Contains settings that are the same for all streams in the MediaChannel,
+    // such as codecs, header extensions, and the global bitrate limit for the
+    // entire channel.
+    VideoSendStreamParameters parameters_ RTC_ACCESS_ON(&thread_checker_);
+    // Contains settings that are unique for each stream, such as max_bitrate.
+    // Does *not* contain codecs, however.
+    // TODO(skvlad): Move ssrcs_ and ssrc_groups_ into rtp_parameters_.
+    // TODO(skvlad): Combine parameters_ and rtp_parameters_ once we have only
+    // one stream per MediaChannel.
+    webrtc::RtpParameters rtp_parameters_ RTC_ACCESS_ON(&thread_checker_);
+    std::unique_ptr<webrtc::VideoEncoder> allocated_encoder_
+        RTC_ACCESS_ON(&thread_checker_);
+    VideoCodec allocated_codec_ RTC_ACCESS_ON(&thread_checker_);
+
+    bool sending_ RTC_ACCESS_ON(&thread_checker_);
+  };
+
+  // Wrapper for the receiver part, contains configs etc. that are needed to
+  // reconstruct the underlying VideoReceiveStream.
+  class WebRtcVideoReceiveStream
+      : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+   public:
+    WebRtcVideoReceiveStream(
+        webrtc::Call* call,
+        const StreamParams& sp,
+        webrtc::VideoReceiveStream::Config config,
+        DecoderFactoryAdapter* decoder_factory,
+        bool default_stream,
+        const std::vector<VideoCodecSettings>& recv_codecs,
+        const webrtc::FlexfecReceiveStream::Config& flexfec_config);
+    ~WebRtcVideoReceiveStream();
+
+    const std::vector<uint32_t>& GetSsrcs() const;
+    rtc::Optional<uint32_t> GetFirstPrimarySsrc() const;
+
+    void SetLocalSsrc(uint32_t local_ssrc);
+    // TODO(deadbeef): Move these feedback parameters into the recv parameters.
+    void SetFeedbackParameters(bool nack_enabled,
+                               bool remb_enabled,
+                               bool transport_cc_enabled,
+                               webrtc::RtcpMode rtcp_mode);
+    void SetRecvParameters(const ChangedRecvParameters& recv_params);
+
+    void OnFrame(const webrtc::VideoFrame& frame) override;
+    bool IsDefaultStream() const;
+
+    void SetSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
+
+    VideoReceiverInfo GetVideoReceiverInfo(bool log_stats);
+
+   private:
+    // Strict weak ordering on SdpVideoFormat so it can be used as a map key.
+    struct SdpVideoFormatCompare {
+      bool operator()(const webrtc::SdpVideoFormat& lhs,
+                      const webrtc::SdpVideoFormat& rhs) const {
+        return std::tie(lhs.name, lhs.parameters) <
+               std::tie(rhs.name, rhs.parameters);
+      }
+    };
+    typedef std::map<webrtc::SdpVideoFormat,
+                     std::unique_ptr<webrtc::VideoDecoder>,
+                     SdpVideoFormatCompare>
+        DecoderMap;
+
+    void RecreateWebRtcVideoStream();
+    void MaybeRecreateWebRtcFlexfecStream();
+
+    void MaybeAssociateFlexfecWithVideo();
+    void MaybeDissociateFlexfecFromVideo();
+
+    void ConfigureCodecs(const std::vector<VideoCodecSettings>& recv_codecs,
+                         DecoderMap* old_codecs);
+    void ConfigureFlexfecCodec(int flexfec_payload_type);
+
+    std::string GetCodecNameFromPayloadType(int payload_type);
+
+    webrtc::Call* const call_;
+    StreamParams stream_params_;
+
+    // Both |stream_| and |flexfec_stream_| are managed by |this|. They are
+    // destroyed by calling call_->DestroyVideoReceiveStream and
+    // call_->DestroyFlexfecReceiveStream, respectively.
+    webrtc::VideoReceiveStream* stream_;
+    const bool default_stream_;
+    webrtc::VideoReceiveStream::Config config_;
+    webrtc::FlexfecReceiveStream::Config flexfec_config_;
+    webrtc::FlexfecReceiveStream* flexfec_stream_;
+
+    DecoderFactoryAdapter* decoder_factory_;
+    DecoderMap allocated_decoders_;
+
+    rtc::CriticalSection sink_lock_;
+    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink_
+        RTC_GUARDED_BY(sink_lock_);
+    // Expands remote RTP timestamps to int64_t to be able to estimate how long
+    // the stream has been running.
+    rtc::TimestampWrapAroundHandler timestamp_wraparound_handler_
+        RTC_GUARDED_BY(sink_lock_);
+    int64_t first_frame_timestamp_ RTC_GUARDED_BY(sink_lock_);
+    // Start NTP time is estimated as current remote NTP time (estimated from
+    // RTCP) minus the elapsed time, as soon as remote NTP time is available.
+    int64_t estimated_remote_start_ntp_time_ms_ RTC_GUARDED_BY(sink_lock_);
+  };
+
+  void Construct(webrtc::Call* call, WebRtcVideoEngine* engine);
+
+  // webrtc::Transport implementation: outgoing RTP/RTCP packets.
+  bool SendRtp(const uint8_t* data,
+               size_t len,
+               const webrtc::PacketOptions& options) override;
+  bool SendRtcp(const uint8_t* data, size_t len) override;
+
+  static std::vector<VideoCodecSettings> MapCodecs(
+      const std::vector<VideoCodec>& codecs);
+  // Select what video codec will be used for sending, i.e. what codec is used
+  // for local encoding, based on supported remote codecs. The first remote
+  // codec that is supported locally will be selected.
+  rtc::Optional<VideoCodecSettings> SelectSendVideoCodec(
+      const std::vector<VideoCodecSettings>& remote_mapped_codecs) const;
+
+  static bool NonFlexfecReceiveCodecsHaveChanged(
+      std::vector<VideoCodecSettings> before,
+      std::vector<VideoCodecSettings> after);
+
+  void FillSenderStats(VideoMediaInfo* info, bool log_stats);
+  void FillReceiverStats(VideoMediaInfo* info, bool log_stats);
+  void FillBandwidthEstimationStats(const webrtc::Call::Stats& stats,
+                                    VideoMediaInfo* info);
+  void FillSendAndReceiveCodecStats(VideoMediaInfo* video_media_info);
+
+  rtc::ThreadChecker thread_checker_;
+
+  uint32_t rtcp_receiver_report_ssrc_;
+  bool sending_;
+  webrtc::Call* const call_;
+
+  DefaultUnsignalledSsrcHandler default_unsignalled_ssrc_handler_;
+  UnsignalledSsrcHandler* const unsignalled_ssrc_handler_;
+
+  const MediaConfig::Video video_config_;
+
+  rtc::CriticalSection stream_crit_;
+  // Using primary-ssrc (first ssrc) as key.
+  std::map<uint32_t, WebRtcVideoSendStream*> send_streams_
+      RTC_GUARDED_BY(stream_crit_);
+  std::map<uint32_t, WebRtcVideoReceiveStream*> receive_streams_
+      RTC_GUARDED_BY(stream_crit_);
+  std::set<uint32_t> send_ssrcs_ RTC_GUARDED_BY(stream_crit_);
+  std::set<uint32_t> receive_ssrcs_ RTC_GUARDED_BY(stream_crit_);
+
+  rtc::Optional<VideoCodecSettings> send_codec_;
+  rtc::Optional<std::vector<webrtc::RtpExtension>> send_rtp_extensions_;
+
+  webrtc::VideoEncoderFactory* const encoder_factory_;
+  DecoderFactoryAdapter* const decoder_factory_;
+  std::vector<VideoCodecSettings> recv_codecs_;
+  std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
+  // See reason for keeping track of the FlexFEC payload type separately in
+  // comment in WebRtcVideoChannel::ChangedRecvParameters.
+  int recv_flexfec_payload_type_;
+  webrtc::Call::Config::BitrateConfig bitrate_config_;
+  // TODO(deadbeef): Don't duplicate information between
+  // send_params/recv_params, rtp_extensions, options, etc.
+  VideoSendParameters send_params_;
+  VideoOptions default_send_options_;
+  VideoRecvParameters recv_params_;
+  int64_t last_stats_log_ms_;
+};
+
+// Factory handed to webrtc::VideoEncoderConfig that turns the configured
+// codec/screencast parameters into concrete VideoStream layouts (simulcast
+// or single stream) for a given input resolution.
+class EncoderStreamFactory
+    : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
+ public:
+  EncoderStreamFactory(std::string codec_name,
+                       int max_qp,
+                       int max_framerate,
+                       bool is_screencast,
+                       bool conference_mode);
+
+ private:
+  std::vector<webrtc::VideoStream> CreateEncoderStreams(
+      int width,
+      int height,
+      const webrtc::VideoEncoderConfig& encoder_config) override;
+
+  const std::string codec_name_;
+  const int max_qp_;
+  const int max_framerate_;
+  const bool is_screencast_;
+  const bool conference_mode_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVIDEOENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine_unittest.cc
new file mode 100644
index 0000000000..7b1eb4a583
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvideoengine_unittest.cc
@@ -0,0 +1,4866 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/test/mock_video_decoder_factory.h"
+#include "api/test/mock_video_encoder_factory.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "call/flexfec_receive_stream.h"
+#include "common_video/h264/profile_level_id.h"
+#include "logging/rtc_event_log/rtc_event_log.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/rtputils.h"
+#include "media/base/testutils.h"
+#include "media/base/videoengine_unittest.h"
+#include "media/engine/constants.h"
+#include "media/engine/fakewebrtccall.h"
+#include "media/engine/fakewebrtcvideoengine.h"
+#include "media/engine/internalencoderfactory.h"
+#include "media/engine/simulcast.h"
+#include "media/engine/webrtcvideoengine.h"
+#include "media/engine/webrtcvoiceengine.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/stringutils.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+
+using webrtc::RtpExtension;
+
+namespace {
+static const int kDefaultQpMax = 56;
+
+static const uint8_t kRedRtxPayloadType = 125;
+
+static const uint32_t kSsrcs1[] = {1};
+static const uint32_t kSsrcs3[] = {1, 2, 3};
+static const uint32_t kRtxSsrcs1[] = {4};
+static const uint32_t kFlexfecSsrc = 5;
+static const uint32_t kIncomingUnsignalledSsrc = 0xC0FFEE;
+static const uint32_t kDefaultRecvSsrc = 0;
+
+static const char kUnsupportedExtensionName[] =
+ "urn:ietf:params:rtp-hdrext:unsupported";
+
+cricket::VideoCodec RemoveFeedbackParams(cricket::VideoCodec&& codec) {
+ codec.feedback_params = cricket::FeedbackParams();
+ return codec;
+}
+
+void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)));
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli)));
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty)));
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty)));
+ EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+ cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
+}
+
+// Return true if any codec in |codecs| is an RTX codec with associated payload
+// type |payload_type|.
+bool HasRtxCodec(const std::vector<cricket::VideoCodec>& codecs,
+ int payload_type) {
+ for (const cricket::VideoCodec& codec : codecs) {
+ int associated_payload_type;
+ if (cricket::CodecNamesEq(codec.name.c_str(), "rtx") &&
+ codec.GetParam(cricket::kCodecParamAssociatedPayloadType,
+ &associated_payload_type) &&
+ associated_payload_type == payload_type) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// TODO(nisse): Duplicated in call.cc.
+const int* FindKeyByValue(const std::map<int, int>& m, int v) {
+ for (const auto& kv : m) {
+ if (kv.second == v)
+ return &kv.first;
+ }
+ return nullptr;
+}
+
+bool HasRtxReceiveAssociation(
+ const webrtc::VideoReceiveStream::Config& config,
+ int payload_type) {
+ return FindKeyByValue(config.rtp.rtx_associated_payload_types,
+ payload_type) != nullptr;
+}
+
+// Check that there's an Rtx payload type for each decoder.
+bool VerifyRtxReceiveAssociations(
+ const webrtc::VideoReceiveStream::Config& config) {
+ for (const auto& decoder : config.decoders) {
+ if (!HasRtxReceiveAssociation(config, decoder.payload_type))
+ return false;
+ }
+ return true;
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
+ int width,
+ int height) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ webrtc::I420Buffer::Create(width, height);
+ webrtc::I420Buffer::SetBlack(buffer);
+ return buffer;
+}
+
+void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,
+ const std::map<int, int>& rtx_types) {
+ std::map<int, int>::const_iterator it;
+ it = rtx_types.find(config.encoder_settings.payload_type);
+ EXPECT_TRUE(it != rtx_types.end() &&
+ it->second == config.rtp.rtx.payload_type);
+
+ if (config.rtp.ulpfec.red_rtx_payload_type != -1) {
+ it = rtx_types.find(config.rtp.ulpfec.red_payload_type);
+ EXPECT_TRUE(it != rtx_types.end() &&
+ it->second == config.rtp.ulpfec.red_rtx_payload_type);
+ }
+}
+
+cricket::MediaConfig GetMediaConfig() {
+ cricket::MediaConfig media_config;
+ media_config.video.enable_cpu_overuse_detection = false;
+ return media_config;
+}
+
+} // namespace
+
+namespace cricket {
+class WebRtcVideoEngineTest : public ::testing::Test {
+ public:
+ WebRtcVideoEngineTest() : WebRtcVideoEngineTest("") {}
+ explicit WebRtcVideoEngineTest(const char* field_trials)
+ : override_field_trials_(field_trials),
+ call_(webrtc::Call::Create(webrtc::Call::Config(&event_log_))),
+ encoder_factory_(new cricket::FakeWebRtcVideoEncoderFactory),
+ decoder_factory_(new cricket::FakeWebRtcVideoDecoderFactory),
+ engine_(std::unique_ptr<cricket::WebRtcVideoEncoderFactory>(
+ encoder_factory_),
+ std::unique_ptr<cricket::WebRtcVideoDecoderFactory>(
+ decoder_factory_)) {
+ std::vector<VideoCodec> engine_codecs = engine_.codecs();
+ RTC_DCHECK(!engine_codecs.empty());
+ bool codec_set = false;
+ for (const cricket::VideoCodec& codec : engine_codecs) {
+ if (codec.name == "rtx") {
+ int associated_payload_type;
+ if (codec.GetParam(kCodecParamAssociatedPayloadType,
+ &associated_payload_type)) {
+ default_apt_rtx_types_[associated_payload_type] = codec.id;
+ }
+ } else if (!codec_set && codec.name != "red" && codec.name != "ulpfec") {
+ default_codec_ = codec;
+ codec_set = true;
+ }
+ }
+
+ RTC_DCHECK(codec_set);
+ }
+
+ protected:
+ // Find the index of the codec in the engine with the given name. The codec
+ // must be present.
+ int GetEngineCodecIndex(const std::string& name) const;
+
+ // Find the codec in the engine with the given name. The codec must be
+ // present.
+ cricket::VideoCodec GetEngineCodec(const std::string& name) const;
+
+ VideoMediaChannel* SetUpForExternalEncoderFactory();
+
+ VideoMediaChannel* SetUpForExternalDecoderFactory(
+ const std::vector<VideoCodec>& codecs);
+
+ void TestExtendedEncoderOveruse(bool use_external_encoder);
+
+ webrtc::test::ScopedFieldTrials override_field_trials_;
+ webrtc::RtcEventLogNullImpl event_log_;
+ // Used in WebRtcVideoEngineVoiceTest, but defined here so it's properly
+ // initialized when the constructor is called.
+ std::unique_ptr<webrtc::Call> call_;
+ // TODO(magjed): Update all tests to use new video codec factories once the
+  // old factories are deprecated.
+ // https://bugs.chromium.org/p/webrtc/issues/detail?id=7925.
+ // These factories are owned by the video engine.
+ cricket::FakeWebRtcVideoEncoderFactory* encoder_factory_;
+ cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_;
+ WebRtcVideoEngine engine_;
+ VideoCodec default_codec_;
+ std::map<int, int> default_apt_rtx_types_;
+};
+
+TEST_F(WebRtcVideoEngineTest, AnnouncesVp9AccordingToBuildFlags) {
+ bool claims_vp9_support = false;
+ for (const cricket::VideoCodec& codec : engine_.codecs()) {
+ if (codec.name == "VP9") {
+ claims_vp9_support = true;
+ break;
+ }
+ }
+#if defined(RTC_DISABLE_VP9)
+ EXPECT_FALSE(claims_vp9_support);
+#else
+ EXPECT_TRUE(claims_vp9_support);
+#endif // defined(RTC_DISABLE_VP9)
+}
+
+TEST_F(WebRtcVideoEngineTest, DefaultRtxCodecHasAssociatedPayloadTypeSet) {
+ std::vector<VideoCodec> engine_codecs = engine_.codecs();
+ for (size_t i = 0; i < engine_codecs.size(); ++i) {
+ if (engine_codecs[i].name != kRtxCodecName)
+ continue;
+ int associated_payload_type;
+ EXPECT_TRUE(engine_codecs[i].GetParam(kCodecParamAssociatedPayloadType,
+ &associated_payload_type));
+ EXPECT_EQ(default_codec_.id, associated_payload_type);
+ return;
+ }
+ FAIL() << "No RTX codec found among default codecs.";
+}
+
+TEST_F(WebRtcVideoEngineTest, SupportsTimestampOffsetHeaderExtension) {
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == RtpExtension::kTimestampOffsetUri) {
+ EXPECT_EQ(RtpExtension::kTimestampOffsetDefaultId, extension.id);
+ return;
+ }
+ }
+ FAIL() << "Timestamp offset extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngineTest, SupportsAbsoluteSenderTimeHeaderExtension) {
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == RtpExtension::kAbsSendTimeUri) {
+ EXPECT_EQ(RtpExtension::kAbsSendTimeDefaultId, extension.id);
+ return;
+ }
+ }
+ FAIL() << "Absolute Sender Time extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngineTest, SupportsTransportSequenceNumberHeaderExtension) {
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == RtpExtension::kTransportSequenceNumberUri) {
+ EXPECT_EQ(RtpExtension::kTransportSequenceNumberDefaultId, extension.id);
+ return;
+ }
+ }
+ FAIL() << "Transport sequence number extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngineTest, SupportsVideoRotationHeaderExtension) {
+ RtpCapabilities capabilities = engine_.GetCapabilities();
+ ASSERT_FALSE(capabilities.header_extensions.empty());
+ for (const RtpExtension& extension : capabilities.header_extensions) {
+ if (extension.uri == RtpExtension::kVideoRotationUri) {
+ EXPECT_EQ(RtpExtension::kVideoRotationDefaultId, extension.id);
+ return;
+ }
+ }
+ FAIL() << "Video Rotation extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) {
+ // Allocate the capturer first to prevent early destruction before channel's
+ // dtor is called.
+ cricket::FakeVideoCapturer capturer;
+
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+ EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
+
+ // Add CVO extension.
+ const int id = 1;
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.extensions.push_back(
+ RtpExtension(RtpExtension::kVideoRotationUri, id));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ // Set capturer.
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+
+ // Verify capturer has turned off applying rotation.
+ EXPECT_FALSE(capturer.apply_rotation());
+
+ // Verify removing header extension turns on applying rotation.
+ parameters.extensions.clear();
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_TRUE(capturer.apply_rotation());
+}
+
+TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) {
+ // Allocate the capturer first to prevent early destruction before channel's
+ // dtor is called.
+ cricket::FakeVideoCapturer capturer;
+
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+ // Add CVO extension.
+ const int id = 1;
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.extensions.push_back(
+ RtpExtension(RtpExtension::kVideoRotationUri, id));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
+
+ // Set capturer.
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+
+ // Verify capturer has turned off applying rotation.
+ EXPECT_FALSE(capturer.apply_rotation());
+}
+
+TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) {
+ cricket::FakeVideoCapturer capturer;
+
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ encoder_factory_->AddSupportedVideoCodecType("VP9");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+ EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
+
+ // Set capturer.
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+
+ // Verify capturer has turned on applying rotation.
+ EXPECT_TRUE(capturer.apply_rotation());
+
+ // Add CVO extension.
+ const int id = 1;
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("VP9"));
+ parameters.extensions.push_back(
+ RtpExtension(RtpExtension::kVideoRotationUri, id));
+ // Also remove the first codec to trigger a codec change as well.
+ parameters.codecs.erase(parameters.codecs.begin());
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ // Verify capturer has turned off applying rotation.
+ EXPECT_FALSE(capturer.apply_rotation());
+
+ // Verify removing header extension turns on applying rotation.
+ parameters.extensions.clear();
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_TRUE(capturer.apply_rotation());
+}
+
+TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) {
+ std::unique_ptr<VideoMediaChannel> channel(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+
+ EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123)));
+
+ EXPECT_FALSE(channel->SetSend(true))
+ << "Channel should not start without codecs.";
+ EXPECT_TRUE(channel->SetSend(false))
+ << "Channel should be stoppable even without set codecs.";
+}
+
+TEST_F(WebRtcVideoEngineTest, GetStatsWithoutSendCodecsSetDoesNotCrash) {
+ std::unique_ptr<VideoMediaChannel> channel(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+ EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123)));
+ VideoMediaInfo info;
+ channel->GetStats(&info);
+}
+
+TEST_F(WebRtcVideoEngineTest, UseExternalFactoryForVp8WhenSupported) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+ channel->OnReadyToSend(true);
+
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_EQ(0, encoder_factory_->GetNumCreatedEncoders());
+ EXPECT_TRUE(channel->SetSend(true));
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(capturer.CaptureFrame());
+  // Sending one frame will have allocated the encoder.
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
+ EXPECT_TRUE_WAIT(encoder_factory_->encoders()[0]->GetNumEncodedFrames() > 0,
+ kTimeout);
+
+ int num_created_encoders = encoder_factory_->GetNumCreatedEncoders();
+ EXPECT_EQ(num_created_encoders, 1);
+
+ // Setting codecs of the same type should not reallocate any encoders
+ // (expecting a no-op).
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders());
+
+ // Remove stream previously added to free the external encoder instance.
+ EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
+ EXPECT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+// Test that when an external encoder factory supports a codec we don't
+// internally support, we still add an RTX codec for it.
+// TODO(deadbeef): Currently this test is only effective if WebRTC is
+// built with no internal H264 support. This test should be updated
+// if/when we start adding RTX codecs for unrecognized codec names.
+TEST_F(WebRtcVideoEngineTest, RtxCodecAddedForExternalCodec) {
+ using webrtc::H264::ProfileLevelIdToString;
+ using webrtc::H264::ProfileLevelId;
+ using webrtc::H264::kLevel1;
+ cricket::VideoCodec h264_constrained_baseline("H264");
+ h264_constrained_baseline.params[kH264FmtpProfileLevelId] =
+ *ProfileLevelIdToString(
+ ProfileLevelId(webrtc::H264::kProfileConstrainedBaseline, kLevel1));
+ cricket::VideoCodec h264_constrained_high("H264");
+ h264_constrained_high.params[kH264FmtpProfileLevelId] =
+ *ProfileLevelIdToString(
+ ProfileLevelId(webrtc::H264::kProfileConstrainedHigh, kLevel1));
+ cricket::VideoCodec h264_high("H264");
+ h264_high.params[kH264FmtpProfileLevelId] = *ProfileLevelIdToString(
+ ProfileLevelId(webrtc::H264::kProfileHigh, kLevel1));
+
+ encoder_factory_->AddSupportedVideoCodec(h264_constrained_baseline);
+ encoder_factory_->AddSupportedVideoCodec(h264_constrained_high);
+ encoder_factory_->AddSupportedVideoCodec(h264_high);
+
+ // First figure out what payload types the test codecs got assigned.
+ const std::vector<cricket::VideoCodec> codecs = engine_.codecs();
+ // Now search for RTX codecs for them. Expect that they all have associated
+ // RTX codecs.
+ EXPECT_TRUE(HasRtxCodec(
+ codecs, FindMatchingCodec(codecs, h264_constrained_baseline)->id));
+ EXPECT_TRUE(HasRtxCodec(
+ codecs, FindMatchingCodec(codecs, h264_constrained_high)->id));
+ EXPECT_TRUE(HasRtxCodec(
+ codecs, FindMatchingCodec(codecs, h264_high)->id));
+}
+
+void WebRtcVideoEngineTest::TestExtendedEncoderOveruse(
+ bool use_external_encoder) {
+ std::unique_ptr<VideoMediaChannel> channel;
+ FakeCall* fake_call = new FakeCall(webrtc::Call::Config(&event_log_));
+ call_.reset(fake_call);
+ if (use_external_encoder) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ channel.reset(SetUpForExternalEncoderFactory());
+ } else {
+ channel.reset(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+ }
+ ASSERT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_TRUE(channel->SetSend(true));
+ FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0];
+
+ EXPECT_EQ(use_external_encoder,
+ stream->GetConfig().encoder_settings.full_overuse_time);
+ // Remove stream previously added to free the external encoder instance.
+ EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
+}
+
+TEST_F(WebRtcVideoEngineTest, EnablesFullEncoderTimeForExternalEncoders) {
+ TestExtendedEncoderOveruse(true);
+}
+
+TEST_F(WebRtcVideoEngineTest, DisablesFullEncoderTimeForNonExternalEncoders) {
+ TestExtendedEncoderOveruse(false);
+}
+
+#if !defined(RTC_DISABLE_VP9)
+TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) {
+ encoder_factory_->AddSupportedVideoCodecType("VP9");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+
+ EXPECT_TRUE(
+ channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+}
+#endif // !defined(RTC_DISABLE_VP9)
+
+TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ FakeCall* fake_call = new FakeCall(webrtc::Call::Config(&event_log_));
+ call_.reset(fake_call);
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+
+ FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+ capturer.Start(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(60),
+ cricket::FOURCC_I420));
+ channel->SetSend(true);
+
+ FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0];
+
+ EXPECT_TRUE(capturer.CaptureFrame());
+ int64_t last_timestamp = stream->GetLastTimestamp();
+ for (int i = 0; i < 10; i++) {
+ EXPECT_TRUE(capturer.CaptureFrame());
+ int64_t timestamp = stream->GetLastTimestamp();
+ int64_t interval = timestamp - last_timestamp;
+
+ // Precision changes from nanosecond to millisecond.
+ // Allow error to be no more than 1.
+ EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(60) / 1E6, interval, 1);
+
+ last_timestamp = timestamp;
+ }
+
+ capturer.Start(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420));
+
+ EXPECT_TRUE(capturer.CaptureFrame());
+ last_timestamp = stream->GetLastTimestamp();
+ for (int i = 0; i < 10; i++) {
+ EXPECT_TRUE(capturer.CaptureFrame());
+ int64_t timestamp = stream->GetLastTimestamp();
+ int64_t interval = timestamp - last_timestamp;
+
+ // Precision changes from nanosecond to millisecond.
+ // Allow error to be no more than 1.
+ EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(30) / 1E6, interval, 1);
+
+ last_timestamp = timestamp;
+ }
+
+ // Remove stream previously added to free the external encoder instance.
+ EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
+}
+
+int WebRtcVideoEngineTest::GetEngineCodecIndex(const std::string& name) const {
+ const std::vector<cricket::VideoCodec> codecs = engine_.codecs();
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ const cricket::VideoCodec engine_codec = codecs[i];
+ if (!CodecNamesEq(name, engine_codec.name))
+ continue;
+ // The tests only use H264 Constrained Baseline. Make sure we don't return
+ // an internal H264 codec from the engine with a different H264 profile.
+ if (CodecNamesEq(name.c_str(), kH264CodecName)) {
+ const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+ webrtc::H264::ParseSdpProfileLevelId(engine_codec.params);
+ if (profile_level_id->profile !=
+ webrtc::H264::kProfileConstrainedBaseline) {
+ continue;
+ }
+ }
+ return i;
+ }
+ // This point should never be reached.
+ ADD_FAILURE() << "Unrecognized codec name: " << name;
+ return -1;
+}
+
+cricket::VideoCodec WebRtcVideoEngineTest::GetEngineCodec(
+ const std::string& name) const {
+ return engine_.codecs()[GetEngineCodecIndex(name)];
+}
+
+VideoMediaChannel* WebRtcVideoEngineTest::SetUpForExternalEncoderFactory() {
+ VideoMediaChannel* channel =
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions());
+ cricket::VideoSendParameters parameters;
+ // We need to look up the codec in the engine to get the correct payload type.
+ for (const VideoCodec& codec : encoder_factory_->supported_codecs())
+ parameters.codecs.push_back(GetEngineCodec(codec.name));
+
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ return channel;
+}
+
+VideoMediaChannel* WebRtcVideoEngineTest::SetUpForExternalDecoderFactory(
+ const std::vector<VideoCodec>& codecs) {
+ VideoMediaChannel* channel =
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions());
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs = codecs;
+ EXPECT_TRUE(channel->SetRecvParameters(parameters));
+
+ return channel;
+}
+
+TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+
+ std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
+
+ EXPECT_TRUE(
+ channel->AddSendStream(CreateSimStreamParams("cname", ssrcs)));
+ EXPECT_TRUE(channel->SetSend(true));
+
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
+
+ // Verify that encoders are configured for simulcast through adapter
+ // (increasing resolution and only configured to send one stream each).
+ int prev_width = -1;
+ for (size_t i = 0; i < encoder_factory_->encoders().size(); ++i) {
+ ASSERT_TRUE(encoder_factory_->encoders()[i]->WaitForInitEncode());
+ webrtc::VideoCodec codec_settings =
+ encoder_factory_->encoders()[i]->GetCodecSettings();
+ EXPECT_EQ(0, codec_settings.numberOfSimulcastStreams);
+ EXPECT_GT(codec_settings.width, prev_width);
+ prev_width = codec_settings.width;
+ }
+
+ EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), true, nullptr, nullptr));
+
+ channel.reset();
+ ASSERT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+TEST_F(WebRtcVideoEngineTest, ChannelWithExternalH264CanChangeToInternalVp8) {
+ encoder_factory_->AddSupportedVideoCodecType("H264");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ ASSERT_EQ(1u, encoder_factory_->encoders().size());
+
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ ASSERT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+TEST_F(WebRtcVideoEngineTest,
+ DontUseExternalEncoderFactoryForUnsupportedCodecs) {
+ encoder_factory_->AddSupportedVideoCodecType("H264");
+
+ std::unique_ptr<VideoMediaChannel> channel(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ // Make sure DestroyVideoEncoder was called on the factory.
+ ASSERT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+TEST_F(WebRtcVideoEngineTest,
+ UsesSimulcastAdapterForVp8WithCombinedVP8AndH264Factory) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ encoder_factory_->AddSupportedVideoCodecType("H264");
+
+ std::unique_ptr<VideoMediaChannel> channel(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
+
+ EXPECT_TRUE(
+ channel->AddSendStream(CreateSimStreamParams("cname", ssrcs)));
+ EXPECT_TRUE(channel->SetSend(true));
+
+ // Send a fake frame, or else the media engine will configure the simulcast
+ // encoder adapter at a low-enough size that it'll only create a single
+ // encoder layer.
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
+ ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
+ EXPECT_EQ(webrtc::kVideoCodecVP8,
+ encoder_factory_->encoders()[0]->GetCodecSettings().codecType);
+
+ channel.reset();
+ // Make sure DestroyVideoEncoder was called on the factory.
+ EXPECT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+TEST_F(WebRtcVideoEngineTest,
+ DestroysNonSimulcastEncoderFromCombinedVP8AndH264Factory) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ encoder_factory_->AddSupportedVideoCodecType("H264");
+
+ std::unique_ptr<VideoMediaChannel> channel(
+ engine_.CreateChannel(call_.get(), GetMediaConfig(), VideoOptions()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("H264"));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ ASSERT_EQ(1u, encoder_factory_->encoders().size());
+
+ // Send a frame of 720p. This should trigger a "real" encoder initialization.
+ cricket::VideoFormat format(
+ 1280, 720, cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420);
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(format));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
+ EXPECT_EQ(webrtc::kVideoCodecH264,
+ encoder_factory_->encoders()[0]->GetCodecSettings().codecType);
+
+ channel.reset();
+ // Make sure DestroyVideoEncoder was called on the factory.
+ ASSERT_EQ(0u, encoder_factory_->encoders().size());
+}
+
+TEST_F(WebRtcVideoEngineTest, SimulcastDisabledForH264) {
+ encoder_factory_->AddSupportedVideoCodecType("H264");
+
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+
+ const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
+ EXPECT_TRUE(
+ channel->AddSendStream(cricket::CreateSimStreamParams("cname", ssrcs)));
+
+ // Send a frame of 720p. This should trigger a "real" encoder initialization.
+ cricket::VideoFormat format(
+ 1280, 720, cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420);
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel->SetVideoSend(ssrcs[0], true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(format));
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ ASSERT_EQ(1u, encoder_factory_->encoders().size());
+ FakeWebRtcVideoEncoder* encoder = encoder_factory_->encoders()[0];
+ ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
+ EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType);
+ EXPECT_EQ(1u, encoder->GetCodecSettings().numberOfSimulcastStreams);
+ EXPECT_TRUE(channel->SetVideoSend(ssrcs[0], true, nullptr, nullptr));
+}
+
+// Test that the FlexFEC field trial properly alters the output of
+// WebRtcVideoEngine::codecs(), for an existing |engine_| object.
+//
+// TODO(brandtr): Remove this test, when the FlexFEC field trial is gone.
+TEST_F(WebRtcVideoEngineTest,
+ Flexfec03SupportedAsInternalCodecBehindFieldTrial) {
+ auto is_flexfec = [](const VideoCodec& codec) {
+ if (codec.name == "flexfec-03")
+ return true;
+ return false;
+ };
+
+ // FlexFEC is not active without field trial.
+ const std::vector<VideoCodec> codecs_before = engine_.codecs();
+ EXPECT_EQ(codecs_before.end(), std::find_if(codecs_before.begin(),
+ codecs_before.end(), is_flexfec));
+
+ // FlexFEC is active with field trial.
+ webrtc::test::ScopedFieldTrials override_field_trials_(
+ "WebRTC-FlexFEC-03-Advertised/Enabled/");
+ const std::vector<VideoCodec> codecs_after = engine_.codecs();
+ EXPECT_NE(codecs_after.end(),
+ std::find_if(codecs_after.begin(), codecs_after.end(), is_flexfec));
+}
+
+// Test that external codecs are added after internal SW codecs.
+TEST_F(WebRtcVideoEngineTest, ReportSupportedExternalCodecs) {
+ const char* kFakeExternalCodecName = "FakeExternalCodec";
+ encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName);
+
+ // The external codec should appear after the internal codec in the vector.
+ const int vp8_index = GetEngineCodecIndex("VP8");
+ const int fake_external_codec_index =
+ GetEngineCodecIndex(kFakeExternalCodecName);
+ EXPECT_LT(vp8_index, fake_external_codec_index);
+}
+
// Test that an external codec that was added after the engine was initialized
// does show up in the codec list after it was added.
TEST_F(WebRtcVideoEngineTest, ReportSupportedExternalCodecsWithAddedCodec) {
  const char* kFakeExternalCodecName1 = "FakeExternalCodec1";
  const char* kFakeExternalCodecName2 = "FakeExternalCodec2";

  // Set up external encoder factory with first codec, and initialize engine.
  encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName1);

  std::vector<cricket::VideoCodec> codecs_before(engine_.codecs());

  // Add second codec. The engine is expected to re-query the factory, so the
  // codec list grows by exactly one entry.
  encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName2);
  std::vector<cricket::VideoCodec> codecs_after(engine_.codecs());
  EXPECT_EQ(codecs_before.size() + 1, codecs_after.size());

  // Check that both fake codecs are present and that the second fake codec
  // appears after the first fake codec.
  const int fake_codec_index1 = GetEngineCodecIndex(kFakeExternalCodecName1);
  const int fake_codec_index2 = GetEngineCodecIndex(kFakeExternalCodecName2);
  EXPECT_LT(fake_codec_index1, fake_codec_index2);
}
+
// Verifies that an external decoder is created for a supported codec, that it
// is not recreated when equivalent recv parameters are re-applied, and that it
// is released when the recv stream is removed.
TEST_F(WebRtcVideoEngineTest, RegisterExternalDecodersIfSupported) {
  decoder_factory_->AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));

  std::unique_ptr<VideoMediaChannel> channel(
      SetUpForExternalDecoderFactory(parameters.codecs));

  EXPECT_TRUE(
      channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  ASSERT_EQ(1u, decoder_factory_->decoders().size());

  // Setting codecs of the same type should not reallocate the decoder.
  EXPECT_TRUE(channel->SetRecvParameters(parameters));
  EXPECT_EQ(1, decoder_factory_->GetNumCreatedDecoders());

  // Remove stream previously added to free the external decoder instance.
  EXPECT_TRUE(channel->RemoveRecvStream(kSsrc));
  EXPECT_EQ(0u, decoder_factory_->decoders().size());
}
+
// Verifies that we can set up decoders that are not internally supported.
TEST_F(WebRtcVideoEngineTest, RegisterExternalH264DecoderIfSupported) {
  // TODO(pbos): Do not assume that encoder/decoder support is symmetric. We
  // can't even query the WebRtcVideoDecoderFactory for supported codecs.
  // For now we add a FakeWebRtcVideoEncoderFactory to add H264 to supported
  // codecs.
  encoder_factory_->AddSupportedVideoCodecType("H264");
  decoder_factory_->AddSupportedVideoCodecType(webrtc::kVideoCodecH264);
  std::vector<cricket::VideoCodec> codecs;
  codecs.push_back(GetEngineCodec("H264"));

  std::unique_ptr<VideoMediaChannel> channel(
      SetUpForExternalDecoderFactory(codecs));

  // Adding a recv stream for an H264 codec must create exactly one external
  // decoder instance.
  EXPECT_TRUE(
      channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  ASSERT_EQ(1u, decoder_factory_->decoders().size());
}
+
// An engine constructed with null encoder/decoder factories must report an
// empty codec list rather than crash.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullFactories) {
  std::unique_ptr<webrtc::VideoEncoderFactory> encoder_factory;
  std::unique_ptr<webrtc::VideoDecoderFactory> decoder_factory;
  WebRtcVideoEngine engine(std::move(encoder_factory),
                           std::move(decoder_factory));
  EXPECT_EQ(0u, engine.codecs().size());
}
+
// An engine whose factories advertise no formats must report an empty codec
// list. The Die() expectations verify that the engine destroys both factories
// it took ownership of.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) {
  // |engine| take ownership of the factories.
  webrtc::MockVideoEncoderFactory* encoder_factory =
      new webrtc::MockVideoEncoderFactory();
  webrtc::MockVideoDecoderFactory* decoder_factory =
      new webrtc::MockVideoDecoderFactory();
  WebRtcVideoEngine engine(
      (std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
      (std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
  EXPECT_CALL(*encoder_factory, GetSupportedFormats());
  EXPECT_EQ(0u, engine.codecs().size());
  EXPECT_CALL(*encoder_factory, Die());
  EXPECT_CALL(*decoder_factory, Die());
}
+
+// Test full behavior in the video engine when video codec factories of the new
+// type are injected supporting the single codec Vp8. Check the returned codecs
+// from the engine and that we will create a Vp8 encoder and decoder using the
+// new factories.
+TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
+ // |engine| take ownership of the factories.
+ webrtc::MockVideoEncoderFactory* encoder_factory =
+ new webrtc::MockVideoEncoderFactory();
+ webrtc::MockVideoDecoderFactory* decoder_factory =
+ new webrtc::MockVideoDecoderFactory();
+ WebRtcVideoEngine engine(
+ (std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
+ (std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
+ const webrtc::SdpVideoFormat vp8_format("VP8");
+ const std::vector<webrtc::SdpVideoFormat> supported_formats = {vp8_format};
+ EXPECT_CALL(*encoder_factory, GetSupportedFormats())
+ .WillRepeatedly(testing::Return(supported_formats));
+
+ // Verify the codecs from the engine.
+ const std::vector<VideoCodec> engine_codecs = engine.codecs();
+ // Verify default codecs has been added correctly.
+ EXPECT_EQ(5u, engine_codecs.size());
+ EXPECT_EQ("VP8", engine_codecs.at(0).name);
+
+ // RTX codec for VP8.
+ EXPECT_EQ("rtx", engine_codecs.at(1).name);
+ int vp8_associated_payload;
+ EXPECT_TRUE(engine_codecs.at(1).GetParam(kCodecParamAssociatedPayloadType,
+ &vp8_associated_payload));
+ EXPECT_EQ(vp8_associated_payload, engine_codecs.at(0).id);
+
+ EXPECT_EQ(kRedCodecName, engine_codecs.at(2).name);
+
+ // RTX codec for RED.
+ EXPECT_EQ("rtx", engine_codecs.at(3).name);
+ int red_associated_payload;
+ EXPECT_TRUE(engine_codecs.at(3).GetParam(kCodecParamAssociatedPayloadType,
+ &red_associated_payload));
+ EXPECT_EQ(red_associated_payload, engine_codecs.at(2).id);
+
+ EXPECT_EQ(kUlpfecCodecName, engine_codecs.at(4).name);
+
+ int associated_payload_type;
+ EXPECT_TRUE(engine_codecs.at(1).GetParam(
+ cricket::kCodecParamAssociatedPayloadType, &associated_payload_type));
+ EXPECT_EQ(engine_codecs.at(0).id, associated_payload_type);
+ // Verify default parameters has been added to the VP8 codec.
+ VerifyCodecHasDefaultFeedbackParams(engine_codecs.at(0));
+
+ // Mock encoder creation. |engine| take ownership of the encoder.
+ webrtc::VideoEncoderFactory::CodecInfo codec_info;
+ codec_info.is_hardware_accelerated = false;
+ codec_info.has_internal_source = false;
+ const webrtc::SdpVideoFormat format("VP8");
+ EXPECT_CALL(*encoder_factory, QueryVideoEncoder(format))
+ .WillRepeatedly(testing::Return(codec_info));
+ FakeWebRtcVideoEncoder* const encoder = new FakeWebRtcVideoEncoder();
+ EXPECT_CALL(*encoder_factory, CreateVideoEncoderProxy(format))
+ .WillOnce(testing::Return(encoder));
+
+ // Mock decoder creation. |engine| take ownership of the decoder.
+ FakeWebRtcVideoDecoder* const decoder = new FakeWebRtcVideoDecoder();
+ EXPECT_CALL(*decoder_factory, CreateVideoDecoderProxy(format))
+ .WillOnce(testing::Return(decoder));
+
+ // Create a call.
+ webrtc::RtcEventLogNullImpl event_log;
+ std::unique_ptr<webrtc::Call> call(
+ webrtc::Call::Create(webrtc::Call::Config(&event_log)));
+
+ // Create send channel.
+ const int send_ssrc = 123;
+ std::unique_ptr<VideoMediaChannel> send_channel(
+ engine.CreateChannel(call.get(), GetMediaConfig(), VideoOptions()));
+ cricket::VideoSendParameters send_parameters;
+ send_parameters.codecs.push_back(engine_codecs.at(0));
+ EXPECT_TRUE(send_channel->SetSendParameters(send_parameters));
+ send_channel->OnReadyToSend(true);
+ EXPECT_TRUE(
+ send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc)));
+ EXPECT_TRUE(send_channel->SetSend(true));
+
+ // Create recv channel.
+ const int recv_ssrc = 321;
+ std::unique_ptr<VideoMediaChannel> recv_channel(
+ engine.CreateChannel(call.get(), GetMediaConfig(), VideoOptions()));
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(engine_codecs.at(0));
+ EXPECT_TRUE(recv_channel->SetRecvParameters(recv_parameters));
+ EXPECT_TRUE(recv_channel->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(recv_ssrc)));
+
+ // Remove streams previously added to free the encoder and decoder instance.
+ EXPECT_CALL(*encoder_factory, Die());
+ EXPECT_CALL(*decoder_factory, Die());
+ EXPECT_TRUE(send_channel->RemoveSendStream(send_ssrc));
+ EXPECT_TRUE(recv_channel->RemoveRecvStream(recv_ssrc));
+}
+
// Test behavior when decoder factory fails to create a decoder (returns null).
// StrictMock is used so any factory call without an explicit EXPECT_CALL
// fails the test.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullDecoder) {
  // |engine| take ownership of the factories.
  webrtc::MockVideoEncoderFactory* encoder_factory =
      new testing::StrictMock<webrtc::MockVideoEncoderFactory>();
  webrtc::MockVideoDecoderFactory* decoder_factory =
      new testing::StrictMock<webrtc::MockVideoDecoderFactory>();
  WebRtcVideoEngine engine(
      (std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
      (std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)));
  const webrtc::SdpVideoFormat vp8_format("VP8");
  const std::vector<webrtc::SdpVideoFormat> supported_formats = {vp8_format};
  EXPECT_CALL(*encoder_factory, GetSupportedFormats())
      .WillRepeatedly(testing::Return(supported_formats));

  // Decoder creation fails.
  EXPECT_CALL(*decoder_factory, CreateVideoDecoderProxy(testing::_))
      .WillOnce(testing::Return(nullptr));

  // Create a call.
  webrtc::RtcEventLogNullImpl event_log;
  std::unique_ptr<webrtc::Call> call(
      webrtc::Call::Create(webrtc::Call::Config(&event_log)));

  // Create recv channel. Adding the stream must still succeed even though the
  // factory returned a null decoder.
  const int recv_ssrc = 321;
  std::unique_ptr<VideoMediaChannel> recv_channel(
      engine.CreateChannel(call.get(), GetMediaConfig(), VideoOptions()));
  cricket::VideoRecvParameters recv_parameters;
  recv_parameters.codecs.push_back(engine.codecs().front());
  EXPECT_TRUE(recv_channel->SetRecvParameters(recv_parameters));
  EXPECT_TRUE(recv_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(recv_ssrc)));

  // Remove streams previously added to free the encoder and decoder instance.
  EXPECT_TRUE(recv_channel->RemoveRecvStream(recv_ssrc));
}
+
// Fixture that runs the generic VideoMediaChannelTest suite against the
// WebRtcVideoEngine/WebRtcVideoChannel pair.
class WebRtcVideoChannelBaseTest
    : public VideoMediaChannelTest<WebRtcVideoEngine, WebRtcVideoChannel> {
 protected:
  typedef VideoMediaChannelTest<WebRtcVideoEngine, WebRtcVideoChannel> Base;

  // Returns the engine's codec entry matching |name| (case-insensitive via
  // CodecNamesEq); fails the test if the name is unknown.
  cricket::VideoCodec GetEngineCodec(const std::string& name) {
    for (const cricket::VideoCodec& engine_codec : engine_.codecs()) {
      if (CodecNamesEq(name, engine_codec.name))
        return engine_codec;
    }
    // This point should never be reached.
    ADD_FAILURE() << "Unrecognized codec name: " << name;
    return cricket::VideoCodec();
  }

  cricket::VideoCodec DefaultCodec() override { return GetEngineCodec("VP8"); }
};
+
// Verifies that id given in stream params is passed to the decoder factory.
TEST_F(WebRtcVideoEngineTest, StreamParamsIdPassedToDecoderFactory) {
  decoder_factory_->AddSupportedVideoCodecType(webrtc::kVideoCodecVP8);
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));

  std::unique_ptr<VideoMediaChannel> channel(
      SetUpForExternalDecoderFactory(parameters.codecs));

  StreamParams sp = cricket::StreamParams::CreateLegacy(kSsrc);
  sp.id = "FakeStreamParamsId";
  EXPECT_TRUE(channel->AddRecvStream(sp));
  EXPECT_EQ(1u, decoder_factory_->decoders().size());

  // The factory records the VideoDecoderParams it was invoked with; the
  // stream id must have been forwarded as the receive_stream_id.
  std::vector<cricket::VideoDecoderParams> params = decoder_factory_->params();
  ASSERT_EQ(1u, params.size());
  EXPECT_EQ(sp.id, params[0].receive_stream_id);
}
+
+TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
+ encoder_factory_->AddSupportedVideoCodecType("VP8");
+ std::unique_ptr<FakeCall> fake_call(
+ new FakeCall(webrtc::Call::Config(&event_log_)));
+ std::unique_ptr<VideoMediaChannel> channel(SetUpForExternalEncoderFactory());
+ ASSERT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ cricket::VideoCodec codec = GetEngineCodec("VP8");
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ channel->OnReadyToSend(true);
+ channel->SetSend(true);
+ ASSERT_TRUE(channel->SetSendParameters(parameters));
+
+ cricket::FakeVideoCapturer capturer;
+ VideoOptions options;
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, &options, &capturer));
+
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
+ EXPECT_EQ(webrtc::kRealtimeVideo,
+ encoder_factory_->encoders().back()->GetCodecSettings().mode);
+
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, &options, &capturer));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ // No change in content type, keep current encoder.
+ EXPECT_EQ(1, encoder_factory_->GetNumCreatedEncoders());
+
+ options.is_screencast.emplace(true);
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, &options, &capturer));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ // Change to screen content, recreate encoder. For the simulcast encoder
+ // adapter case, this will result in two calls since InitEncode triggers a
+ // a new instance.
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
+ EXPECT_EQ(webrtc::kScreensharing,
+ encoder_factory_->encoders().back()->GetCodecSettings().mode);
+
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, &options, &capturer));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ // Still screen content, no need to update encoder.
+ EXPECT_EQ(2, encoder_factory_->GetNumCreatedEncoders());
+
+ options.is_screencast.emplace(false);
+ options.video_noise_reduction.emplace(false);
+ EXPECT_TRUE(channel->SetVideoSend(kSsrc, true, &options, &capturer));
+ // Change back to regular video content, update encoder. Also change
+ // a non |is_screencast| option just to verify it doesn't affect recreation.
+ EXPECT_TRUE(capturer.CaptureFrame());
+ ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(3));
+ EXPECT_EQ(webrtc::kRealtimeVideo,
+ encoder_factory_->encoders().back()->GetCodecSettings().mode);
+
+ // Remove stream previously added to free the external encoder instance.
+ EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
+ EXPECT_EQ(0u, encoder_factory_->encoders().size());
+}
+
// Wrappers that instantiate the shared VideoMediaChannelTest cases from the
// Base typedef as gtest cases on WebRtcVideoChannelBaseTest.
#define WEBRTC_BASE_TEST(test) \
  TEST_F(WebRtcVideoChannelBaseTest, test) { Base::test(); }

// Same, but registers the case disabled (still compiled, not run by default).
#define WEBRTC_DISABLED_BASE_TEST(test) \
  TEST_F(WebRtcVideoChannelBaseTest, DISABLED_##test) { Base::test(); }

WEBRTC_BASE_TEST(SetSend);
WEBRTC_BASE_TEST(SetSendWithoutCodecs);
WEBRTC_BASE_TEST(SetSendSetsTransportBufferSizes);

WEBRTC_BASE_TEST(GetStats);
WEBRTC_BASE_TEST(GetStatsMultipleRecvStreams);
WEBRTC_BASE_TEST(GetStatsMultipleSendStreams);

WEBRTC_BASE_TEST(SetSendBandwidth);

WEBRTC_BASE_TEST(SetSendSsrc);
WEBRTC_BASE_TEST(SetSendSsrcAfterSetCodecs);

WEBRTC_BASE_TEST(SetSink);

WEBRTC_BASE_TEST(AddRemoveSendStreams);

WEBRTC_BASE_TEST(SimulateConference);

WEBRTC_DISABLED_BASE_TEST(AddRemoveCapturer);

WEBRTC_BASE_TEST(RemoveCapturerWithoutAdd);

WEBRTC_BASE_TEST(AddRemoveCapturerMultipleSources);

WEBRTC_BASE_TEST(RejectEmptyStreamParams);

WEBRTC_BASE_TEST(MultipleSendStreams);
+
// NOTE(review): the Vga/Qvga/SvcQqvga variants below all pass the same engine
// VP8 codec; the resolution implied by each name is presumably handled inside
// SendAndReceive — confirm against the base class.
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Vga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
+
// End-to-end VP8 send/receive smoke test (QVGA variant; see note on the Vga
// case regarding identical codec arguments).
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Qvga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
+
// End-to-end VP8 send/receive smoke test (SVC QQVGA variant; see note on the
// Vga case regarding identical codec arguments).
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8SvcQqvga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
+
TEST_F(WebRtcVideoChannelBaseTest, TwoStreamsSendAndReceive) {
  // Set a high bitrate to not be downscaled by VP8 due to low initial start
  // bitrates. This currently happens at <250k, and two streams sharing 300k
  // initially will use QVGA instead of VGA.
  // TODO(pbos): Set up the quality scaler so that both senders reliably start
  // at QVGA, then verify that instead.
  cricket::VideoCodec codec = GetEngineCodec("VP8");
  // Start-bitrate codec parameter is an SDP string value.
  codec.params[kCodecParamStartBitrate] = "1000000";
  Base::TwoStreamsSendAndReceive(codec);
}
+
// Channel-level test fixture: backs a WebRtcVideoChannel with a FakeCall so
// that the streams, codecs, bitrates and RTP header extensions the channel
// configures can be inspected directly.
class WebRtcVideoChannelTest : public WebRtcVideoEngineTest {
 public:
  WebRtcVideoChannelTest() : WebRtcVideoChannelTest("") {}
  explicit WebRtcVideoChannelTest(const char* field_trials)
      : WebRtcVideoEngineTest(field_trials), last_ssrc_(0) {}
  void SetUp() override {
    fake_call_.reset(new FakeCall(webrtc::Call::Config(&event_log_)));
    channel_.reset(engine_.CreateChannel(fake_call_.get(), GetMediaConfig(),
                                         VideoOptions()));
    channel_->OnReadyToSend(true);
    last_ssrc_ = 123;
    send_parameters_.codecs = engine_.codecs();
    recv_parameters_.codecs = engine_.codecs();
    ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
  }

 protected:
  // Adds a send stream on a fresh SSRC and returns the fake stream the call
  // created for it.
  FakeVideoSendStream* AddSendStream() {
    return AddSendStream(StreamParams::CreateLegacy(++last_ssrc_));
  }

  // Adds a send stream for |sp|, asserts exactly one stream was created, and
  // returns it.
  FakeVideoSendStream* AddSendStream(const StreamParams& sp) {
    size_t num_streams = fake_call_->GetVideoSendStreams().size();
    EXPECT_TRUE(channel_->AddSendStream(sp));
    std::vector<FakeVideoSendStream*> streams =
        fake_call_->GetVideoSendStreams();
    EXPECT_EQ(num_streams + 1, streams.size());
    return streams[streams.size() - 1];
  }

  std::vector<FakeVideoSendStream*> GetFakeSendStreams() {
    return fake_call_->GetVideoSendStreams();
  }

  // Receive-side counterparts of the AddSendStream helpers.
  FakeVideoReceiveStream* AddRecvStream() {
    return AddRecvStream(StreamParams::CreateLegacy(++last_ssrc_));
  }

  FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) {
    size_t num_streams = fake_call_->GetVideoReceiveStreams().size();
    EXPECT_TRUE(channel_->AddRecvStream(sp));
    std::vector<FakeVideoReceiveStream*> streams =
        fake_call_->GetVideoReceiveStreams();
    EXPECT_EQ(num_streams + 1, streams.size());
    return streams[streams.size() - 1];
  }

  // Applies min/start/max bitrate codec params (kbps strings, as carried in
  // SDP) and verifies the FakeCall's bitrate config ends up with the expected
  // values in bps.
  void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate_kbps,
                                          int expected_min_bitrate_bps,
                                          const char* start_bitrate_kbps,
                                          int expected_start_bitrate_bps,
                                          const char* max_bitrate_kbps,
                                          int expected_max_bitrate_bps) {
    auto& codecs = send_parameters_.codecs;
    codecs.clear();
    codecs.push_back(GetEngineCodec("VP8"));
    codecs[0].params[kCodecParamMinBitrate] = min_bitrate_kbps;
    codecs[0].params[kCodecParamStartBitrate] = start_bitrate_kbps;
    codecs[0].params[kCodecParamMaxBitrate] = max_bitrate_kbps;
    EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

    EXPECT_EQ(expected_min_bitrate_bps,
              fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
    EXPECT_EQ(expected_start_bitrate_bps,
              fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
    EXPECT_EQ(expected_max_bitrate_bps,
              fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
  }

  // Verifies that a send RTP header extension can be enabled, does not leak
  // onto receive streams, can be removed, and can be re-added to an existing
  // stream.
  void TestSetSendRtpHeaderExtensions(const std::string& ext_uri) {
    // Enable extension.
    const int id = 1;
    cricket::VideoSendParameters parameters = send_parameters_;
    parameters.extensions.push_back(RtpExtension(ext_uri, id));
    EXPECT_TRUE(channel_->SetSendParameters(parameters));
    FakeVideoSendStream* send_stream =
        AddSendStream(cricket::StreamParams::CreateLegacy(123));

    // Verify the send extension id.
    ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size());
    EXPECT_EQ(id, send_stream->GetConfig().rtp.extensions[0].id);
    EXPECT_EQ(ext_uri, send_stream->GetConfig().rtp.extensions[0].uri);
    // Verify call with same set of extensions returns true.
    EXPECT_TRUE(channel_->SetSendParameters(parameters));
    // Verify that SetSendRtpHeaderExtensions doesn't implicitly add them for
    // receivers.
    EXPECT_TRUE(AddRecvStream(cricket::StreamParams::CreateLegacy(123))
                    ->GetConfig()
                    .rtp.extensions.empty());

    // Verify that existing RTP header extensions can be removed.
    EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
    ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
    send_stream = fake_call_->GetVideoSendStreams()[0];
    EXPECT_TRUE(send_stream->GetConfig().rtp.extensions.empty());

    // Verify that adding send RTP header extensions adds them for existing
    // streams.
    EXPECT_TRUE(channel_->SetSendParameters(parameters));
    send_stream = fake_call_->GetVideoSendStreams()[0];
    ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size());
    EXPECT_EQ(id, send_stream->GetConfig().rtp.extensions[0].id);
    EXPECT_EQ(ext_uri, send_stream->GetConfig().rtp.extensions[0].uri);
  }

  // Receive-side counterpart of TestSetSendRtpHeaderExtensions().
  void TestSetRecvRtpHeaderExtensions(const std::string& ext_uri) {
    // Enable extension.
    const int id = 1;
    cricket::VideoRecvParameters parameters = recv_parameters_;
    parameters.extensions.push_back(RtpExtension(ext_uri, id));
    EXPECT_TRUE(channel_->SetRecvParameters(parameters));

    FakeVideoReceiveStream* recv_stream =
        AddRecvStream(cricket::StreamParams::CreateLegacy(123));

    // Verify the recv extension id.
    ASSERT_EQ(1u, recv_stream->GetConfig().rtp.extensions.size());
    EXPECT_EQ(id, recv_stream->GetConfig().rtp.extensions[0].id);
    EXPECT_EQ(ext_uri, recv_stream->GetConfig().rtp.extensions[0].uri);
    // Verify call with same set of extensions returns true.
    EXPECT_TRUE(channel_->SetRecvParameters(parameters));

    // Verify that SetRecvRtpHeaderExtensions doesn't implicitly add them for
    // senders.
    EXPECT_TRUE(AddSendStream(cricket::StreamParams::CreateLegacy(123))
                    ->GetConfig()
                    .rtp.extensions.empty());

    // Verify that existing RTP header extensions can be removed.
    EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
    ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
    recv_stream = fake_call_->GetVideoReceiveStreams()[0];
    EXPECT_TRUE(recv_stream->GetConfig().rtp.extensions.empty());

    // Verify that adding receive RTP header extensions adds them for existing
    // streams.
    EXPECT_TRUE(channel_->SetRecvParameters(parameters));
    recv_stream = fake_call_->GetVideoReceiveStreams()[0];
    ASSERT_EQ(1u, recv_stream->GetConfig().rtp.extensions.size());
    EXPECT_EQ(id, recv_stream->GetConfig().rtp.extensions[0].id);
    EXPECT_EQ(ext_uri, recv_stream->GetConfig().rtp.extensions[0].uri);
  }

  // Configures all |extensions| on the send side and verifies that only
  // |expected_extension| survives the redundancy filter.
  void TestExtensionFilter(const std::vector<std::string>& extensions,
                           const std::string& expected_extension) {
    cricket::VideoSendParameters parameters = send_parameters_;
    int expected_id = -1;
    int id = 1;
    for (const std::string& extension : extensions) {
      if (extension == expected_extension)
        expected_id = id;
      parameters.extensions.push_back(RtpExtension(extension, id++));
    }
    EXPECT_TRUE(channel_->SetSendParameters(parameters));
    FakeVideoSendStream* send_stream =
        AddSendStream(cricket::StreamParams::CreateLegacy(123));

    // Verify that only one of them has been set, and that it is the one with
    // highest priority (transport sequence number).
    ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size());
    EXPECT_EQ(expected_id, send_stream->GetConfig().rtp.extensions[0].id);
    EXPECT_EQ(expected_extension,
              send_stream->GetConfig().rtp.extensions[0].uri);
  }

  // Shared test bodies defined elsewhere in this file.
  void TestDegradationPreference(bool resolution_scaling_enabled,
                                 bool fps_scaling_enabled);

  void TestCpuAdaptation(bool enable_overuse, bool is_screenshare);
  void TestReceiverLocalSsrcConfiguration(bool receiver_first);
  void TestReceiveUnsignaledSsrcPacket(uint8_t payload_type,
                                       bool expect_created_receive_stream);

  // Toggles the denoising option on |ssrc| and captures one frame so the new
  // options take effect; returns the most recent fake send stream.
  FakeVideoSendStream* SetDenoisingOption(
      uint32_t ssrc,
      cricket::FakeVideoCapturer* capturer,
      bool enabled) {
    cricket::VideoOptions options;
    options.video_noise_reduction = enabled;
    EXPECT_TRUE(channel_->SetVideoSend(ssrc, true, &options, capturer));
    // Options only take effect on the next frame.
    EXPECT_TRUE(capturer->CaptureFrame());

    return fake_call_->GetVideoSendStreams().back();
  }

  // Adds a simulcast send stream (3 layers if |enabled|, otherwise 1),
  // optionally with RTX SSRCs derived from a fixed offset.
  FakeVideoSendStream* SetUpSimulcast(bool enabled, bool with_rtx) {
    const int kRtxSsrcOffset = 0xDEADBEEF;
    last_ssrc_ += 3;
    std::vector<uint32_t> ssrcs;
    std::vector<uint32_t> rtx_ssrcs;
    uint32_t num_streams = enabled ? 3 : 1;
    for (uint32_t i = 0; i < num_streams; ++i) {
      uint32_t ssrc = last_ssrc_ + i;
      ssrcs.push_back(ssrc);
      if (with_rtx) {
        rtx_ssrcs.push_back(ssrc + kRtxSsrcOffset);
      }
    }
    if (with_rtx) {
      return AddSendStream(
          cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));
    }
    return AddSendStream(CreateSimStreamParams("cname", ssrcs));
  }

  // Returns the max bitrate configured on the single encoder stream; expects
  // exactly one send stream with one encoding to exist.
  int GetMaxEncoderBitrate() {
    std::vector<FakeVideoSendStream*> streams =
        fake_call_->GetVideoSendStreams();
    EXPECT_EQ(1u, streams.size());
    FakeVideoSendStream* stream = streams[streams.size() - 1];
    EXPECT_EQ(1, stream->GetEncoderConfig().number_of_streams);
    return stream->GetVideoStreams()[0].max_bitrate_bps;
  }

  // Applies a channel-global max bitrate and a per-encoding max bitrate, then
  // verifies both the round-tripped RtpParameters and the value that actually
  // reached the encoder.
  void SetAndExpectMaxBitrate(int global_max,
                              int stream_max,
                              int expected_encoder_bitrate) {
    VideoSendParameters limited_send_params = send_parameters_;
    limited_send_params.max_bandwidth_bps = global_max;
    EXPECT_TRUE(channel_->SetSendParameters(limited_send_params));
    webrtc::RtpParameters parameters =
        channel_->GetRtpSendParameters(last_ssrc_);
    EXPECT_EQ(1UL, parameters.encodings.size());
    parameters.encodings[0].max_bitrate_bps = stream_max;
    EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
    // Read back the parameters and verify they have the correct value
    parameters = channel_->GetRtpSendParameters(last_ssrc_);
    EXPECT_EQ(1UL, parameters.encodings.size());
    EXPECT_EQ(stream_max, parameters.encodings[0].max_bitrate_bps);
    // Verify that the new value propagated down to the encoder
    EXPECT_EQ(expected_encoder_bitrate, GetMaxEncoderBitrate());
  }

  std::unique_ptr<FakeCall> fake_call_;            // owns the fake call
  std::unique_ptr<VideoMediaChannel> channel_;     // channel under test
  cricket::VideoSendParameters send_parameters_;   // defaults from SetUp()
  cricket::VideoRecvParameters recv_parameters_;   // defaults from SetUp()
  uint32_t last_ssrc_;                             // monotonically increasing
};
+
+TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) {
+ const uint32_t kVideoSsrc = 123;
+ const std::string kSyncLabel = "AvSyncLabel";
+
+ cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kVideoSsrc);
+ sp.sync_label = kSyncLabel;
+ EXPECT_TRUE(channel_->AddRecvStream(sp));
+
+ EXPECT_EQ(1, fake_call_->GetVideoReceiveStreams().size());
+ EXPECT_EQ(kSyncLabel,
+ fake_call_->GetVideoReceiveStreams()[0]->GetConfig().sync_group)
+ << "SyncGroup should be set based on sync_label";
+}
+
// Verifies RTX SSRC wiring for simulcast: send side exposes the RTX SSRCs in
// its config, and the receive side maps RTX payload types for all decoders.
TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) {
  cricket::VideoSendParameters parameters;
  parameters.codecs = engine_.codecs();
  EXPECT_TRUE(channel_->SetSendParameters(parameters));
  EXPECT_TRUE(channel_->SetSend(true));
  parameters.conference_mode = true;
  EXPECT_TRUE(channel_->SetSendParameters(parameters));

  // Send side.
  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1);
  const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);
  FakeVideoSendStream* send_stream = AddSendStream(
      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));

  ASSERT_EQ(rtx_ssrcs.size(), send_stream->GetConfig().rtp.rtx.ssrcs.size());
  for (size_t i = 0; i < rtx_ssrcs.size(); ++i)
    EXPECT_EQ(rtx_ssrcs[i], send_stream->GetConfig().rtp.rtx.ssrcs[i]);

  // Receiver side.
  FakeVideoReceiveStream* recv_stream = AddRecvStream(
      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));
  EXPECT_FALSE(
      recv_stream->GetConfig().rtp.rtx_associated_payload_types.empty());
  EXPECT_TRUE(VerifyRtxReceiveAssociations(recv_stream->GetConfig()))
      << "RTX should be mapped for all decoders/payload types.";
  EXPECT_TRUE(HasRtxReceiveAssociation(recv_stream->GetConfig(),
                                       GetEngineCodec("red").id))
      << "RTX should be mapped for the RED payload type";

  EXPECT_EQ(rtx_ssrcs[0], recv_stream->GetConfig().rtp.rtx_ssrc);
}
+
TEST_F(WebRtcVideoChannelTest, RecvStreamWithRtx) {
  // Setup one channel with an associated RTX stream.
  cricket::StreamParams params =
      cricket::StreamParams::CreateLegacy(kSsrcs1[0]);
  params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]);
  FakeVideoReceiveStream* recv_stream = AddRecvStream(params);
  // The FID SSRC must surface as the receive stream's rtx_ssrc.
  EXPECT_EQ(kRtxSsrcs1[0], recv_stream->GetConfig().rtp.rtx_ssrc);

  EXPECT_TRUE(VerifyRtxReceiveAssociations(recv_stream->GetConfig()))
      << "RTX should be mapped for all decoders/payload types.";
  EXPECT_TRUE(HasRtxReceiveAssociation(recv_stream->GetConfig(),
                                       GetEngineCodec("red").id))
      << "RTX should be mapped for the RED payload type";
}
+
TEST_F(WebRtcVideoChannelTest, RecvStreamNoRtx) {
  // Setup one channel without an associated RTX stream.
  cricket::StreamParams params =
      cricket::StreamParams::CreateLegacy(kSsrcs1[0]);
  FakeVideoReceiveStream* recv_stream = AddRecvStream(params);
  // With no FID SSRC configured, rtx_ssrc must stay at its zero default.
  ASSERT_EQ(0U, recv_stream->GetConfig().rtp.rtx_ssrc);
}
+
// Streams created without explicit parameters must carry no RTP header
// extensions. NOTE(review): "Extesions" typo is part of the registered test
// name; renaming it would change the test id seen by filters/dashboards.
TEST_F(WebRtcVideoChannelTest, NoHeaderExtesionsByDefault) {
  FakeVideoSendStream* send_stream =
      AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0]));
  ASSERT_TRUE(send_stream->GetConfig().rtp.extensions.empty());

  FakeVideoReceiveStream* recv_stream =
      AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0]));
  ASSERT_TRUE(recv_stream->GetConfig().rtp.extensions.empty());
}
+
// Test support for RTP timestamp offset header extension.
TEST_F(WebRtcVideoChannelTest, SendRtpTimestampOffsetHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(RtpExtension::kTimestampOffsetUri);
}

TEST_F(WebRtcVideoChannelTest, RecvRtpTimestampOffsetHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(RtpExtension::kTimestampOffsetUri);
}
+
// Test support for absolute send time header extension.
TEST_F(WebRtcVideoChannelTest, SendAbsoluteSendTimeHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(RtpExtension::kAbsSendTimeUri);
}

TEST_F(WebRtcVideoChannelTest, RecvAbsoluteSendTimeHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(RtpExtension::kAbsSendTimeUri);
}
+
// When several redundant BWE extensions are offered, transport sequence
// number has the highest priority and must be the one kept.
TEST_F(WebRtcVideoChannelTest, FiltersExtensionsPicksTransportSeqNum) {
  // Enable three redundant extensions.
  std::vector<std::string> extensions;
  extensions.push_back(RtpExtension::kAbsSendTimeUri);
  extensions.push_back(RtpExtension::kTimestampOffsetUri);
  extensions.push_back(RtpExtension::kTransportSequenceNumberUri);
  TestExtensionFilter(extensions, RtpExtension::kTransportSequenceNumberUri);
}
+
// Without transport sequence number offered, absolute send time wins over
// timestamp offset.
TEST_F(WebRtcVideoChannelTest, FiltersExtensionsPicksAbsSendTime) {
  // Enable two redundant extensions.
  std::vector<std::string> extensions;
  extensions.push_back(RtpExtension::kAbsSendTimeUri);
  extensions.push_back(RtpExtension::kTimestampOffsetUri);
  TestExtensionFilter(extensions, RtpExtension::kAbsSendTimeUri);
}
+
// Test support for transport sequence number header extension.
TEST_F(WebRtcVideoChannelTest, SendTransportSequenceNumberHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(RtpExtension::kTransportSequenceNumberUri);
}
TEST_F(WebRtcVideoChannelTest, RecvTransportSequenceNumberHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(RtpExtension::kTransportSequenceNumberUri);
}
+
// Test support for video rotation header extension.
TEST_F(WebRtcVideoChannelTest, SendVideoRotationHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(RtpExtension::kVideoRotationUri);
}
TEST_F(WebRtcVideoChannelTest, RecvVideoRotationHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(RtpExtension::kVideoRotationUri);
}
+
// Re-applying the same set of send extensions (even reordered) must not tear
// down and recreate the underlying send stream; a different set must.
TEST_F(WebRtcVideoChannelTest, IdenticalSendExtensionsDoesntRecreateStream) {
  const int kAbsSendTimeId = 1;
  const int kVideoRotationId = 2;
  send_parameters_.extensions.push_back(
      RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeId));
  send_parameters_.extensions.push_back(
      RtpExtension(RtpExtension::kVideoRotationUri, kVideoRotationId));

  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
  FakeVideoSendStream* send_stream =
      AddSendStream(cricket::StreamParams::CreateLegacy(123));

  EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
  ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size());

  // Setting the same extensions (even if in different order) shouldn't
  // reallocate the stream.
  std::reverse(send_parameters_.extensions.begin(),
               send_parameters_.extensions.end());
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());

  // Setting different extensions should recreate the stream.
  send_parameters_.extensions.resize(1);
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams());
}
+
// Receive-side counterpart of IdenticalSendExtensionsDoesntRecreateStream.
TEST_F(WebRtcVideoChannelTest, IdenticalRecvExtensionsDoesntRecreateStream) {
  const int kTOffsetId = 1;
  const int kAbsSendTimeId = 2;
  const int kVideoRotationId = 3;
  recv_parameters_.extensions.push_back(
      RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeId));
  recv_parameters_.extensions.push_back(
      RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId));
  recv_parameters_.extensions.push_back(
      RtpExtension(RtpExtension::kVideoRotationUri, kVideoRotationId));

  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
  FakeVideoReceiveStream* recv_stream =
      AddRecvStream(cricket::StreamParams::CreateLegacy(123));

  EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
  ASSERT_EQ(3u, recv_stream->GetConfig().rtp.extensions.size());

  // Setting the same extensions (even if in different order) shouldn't
  // reallocate the stream.
  std::reverse(recv_parameters_.extensions.begin(),
               recv_parameters_.extensions.end());
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));

  EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());

  // Setting different extensions should recreate the stream.
  recv_parameters_.extensions.resize(1);
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));

  EXPECT_EQ(2, fake_call_->GetNumCreatedReceiveStreams());
}
+
+TEST_F(WebRtcVideoChannelTest,
+       SetSendRtpHeaderExtensionsExcludeUnsupportedExtensions) {
+  // Offer one extension the engine does not know plus timestamp-offset.
+  const int kUnsupportedId = 1;
+  const int kTOffsetId = 2;
+
+  send_parameters_.extensions.push_back(
+      RtpExtension(kUnsupportedExtensionName, kUnsupportedId));
+  send_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId));
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  FakeVideoSendStream* stream =
+      AddSendStream(cricket::StreamParams::CreateLegacy(123));
+
+  // The unsupported extension is silently dropped; only timestamp-offset
+  // reaches the created send stream.
+  const auto& configured_extensions = stream->GetConfig().rtp.extensions;
+  ASSERT_EQ(1u, configured_extensions.size());
+  EXPECT_STREQ(RtpExtension::kTimestampOffsetUri,
+               configured_extensions[0].uri.c_str());
+}
+
+TEST_F(WebRtcVideoChannelTest,
+       SetRecvRtpHeaderExtensionsExcludeUnsupportedExtensions) {
+  // Offer one extension the engine does not know plus timestamp-offset.
+  const int kUnsupportedId = 1;
+  const int kTOffsetId = 2;
+
+  recv_parameters_.extensions.push_back(
+      RtpExtension(kUnsupportedExtensionName, kUnsupportedId));
+  recv_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId));
+  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+  FakeVideoReceiveStream* stream =
+      AddRecvStream(cricket::StreamParams::CreateLegacy(123));
+
+  // The unsupported extension is silently dropped; only timestamp-offset
+  // reaches the created receive stream.
+  const auto& configured_extensions = stream->GetConfig().rtp.extensions;
+  ASSERT_EQ(1u, configured_extensions.size());
+  EXPECT_STREQ(RtpExtension::kTimestampOffsetUri,
+               configured_extensions[0].uri.c_str());
+}
+
+TEST_F(WebRtcVideoChannelTest, SetSendRtpHeaderExtensionsRejectsIncorrectIds) {
+  // Ids outside the accepted range (the ids below, including 0 and
+  // negatives) must cause SetSendParameters to fail.
+  const int kIncorrectIds[] = {-2, -1, 0, 15, 16};
+  for (int incorrect_id : kIncorrectIds) {
+    send_parameters_.extensions.push_back(
+        RtpExtension(RtpExtension::kTimestampOffsetUri, incorrect_id));
+    EXPECT_FALSE(channel_->SetSendParameters(send_parameters_))
+        << "Bad extension id '" << incorrect_id << "' accepted.";
+  }
+}
+
+TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsIncorrectIds) {
+  // Ids outside the accepted range (the ids below, including 0 and
+  // negatives) must cause SetRecvParameters to fail.
+  const int kIncorrectIds[] = {-2, -1, 0, 15, 16};
+  for (int incorrect_id : kIncorrectIds) {
+    recv_parameters_.extensions.push_back(
+        RtpExtension(RtpExtension::kTimestampOffsetUri, incorrect_id));
+    EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_))
+        << "Bad extension id '" << incorrect_id << "' accepted.";
+  }
+}
+
+TEST_F(WebRtcVideoChannelTest, SetSendRtpHeaderExtensionsRejectsDuplicateIds) {
+  // Two different extensions sharing one id must be rejected.
+  const int kId = 1;
+  send_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  send_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kAbsSendTimeUri, kId));
+  EXPECT_FALSE(channel_->SetSendParameters(send_parameters_));
+
+  // Listing the very same extension twice is not supported either.
+  send_parameters_.extensions.clear();
+  send_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  send_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  EXPECT_FALSE(channel_->SetSendParameters(send_parameters_));
+}
+
+TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) {
+  // Two different extensions sharing one id must be rejected.
+  const int kId = 1;
+  recv_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  recv_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kAbsSendTimeUri, kId));
+  EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_));
+
+  // Listing the very same extension twice is not supported either.
+  recv_parameters_.extensions.clear();
+  recv_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  recv_parameters_.extensions.push_back(
+      RtpExtension(RtpExtension::kTimestampOffsetUri, kId));
+  EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_));
+}
+
+TEST_F(WebRtcVideoChannelTest, AddRecvStreamOnlyUsesOneReceiveStream) {
+ // A single-SSRC StreamParams must map to exactly one underlying
+ // webrtc::VideoReceiveStream in the fake call.
+ EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
+}
+
+TEST_F(WebRtcVideoChannelTest, RtcpIsCompoundByDefault) {
+  // A freshly added receive stream defaults to compound RTCP mode.
+  FakeVideoReceiveStream* recv_stream = AddRecvStream();
+  EXPECT_EQ(webrtc::RtcpMode::kCompound,
+            recv_stream->GetConfig().rtp.rtcp_mode);
+}
+
+TEST_F(WebRtcVideoChannelTest, RembIsEnabledByDefault) {
+  // REMB feedback is on for a freshly added receive stream.
+  EXPECT_TRUE(AddRecvStream()->GetConfig().rtp.remb);
+}
+
+TEST_F(WebRtcVideoChannelTest, TransportCcIsEnabledByDefault) {
+  // Transport-wide congestion-control feedback is on for a freshly added
+  // receive stream.
+  EXPECT_TRUE(AddRecvStream()->GetConfig().rtp.transport_cc);
+}
+
+TEST_F(WebRtcVideoChannelTest, RembCanBeEnabledAndDisabled) {
+ // REMB on the receive stream is driven by the feedback params of the *send*
+ // codecs; stripping and restoring them must toggle rtp.remb.
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ EXPECT_TRUE(stream->GetConfig().rtp.remb);
+
+ // Verify that REMB is turned off when send(!) codecs without REMB are set.
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8")));
+ EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty());
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_FALSE(stream->GetConfig().rtp.remb);
+
+ // Verify that REMB is turned on when setting default codecs since the
+ // default codecs have REMB enabled.
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_TRUE(stream->GetConfig().rtp.remb);
+}
+
+TEST_F(WebRtcVideoChannelTest, TransportCcCanBeEnabledAndDisabled) {
+ // Like the REMB test above, but for transport-wide congestion-control
+ // feedback: the *send* codec feedback params drive the receive config.
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ EXPECT_TRUE(stream->GetConfig().rtp.transport_cc);
+
+ // Verify that transport cc feedback is turned off when send(!) codecs without
+ // transport cc feedback are set.
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8")));
+ EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty());
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_FALSE(stream->GetConfig().rtp.transport_cc);
+
+ // Verify that transport cc feedback is turned on when setting default codecs
+ // since the default codecs have transport cc feedback enabled.
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_TRUE(stream->GetConfig().rtp.transport_cc);
+}
+
+TEST_F(WebRtcVideoChannelTest, NackIsEnabledByDefault) {
+ // NACK must be enabled (non-zero retransmission history) on both the send
+ // and receive side, and the history length must match between them.
+ VerifyCodecHasDefaultFeedbackParams(default_codec_);
+
+ cricket::VideoSendParameters parameters;
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(channel_->SetSend(true));
+
+ // Send side.
+ FakeVideoSendStream* send_stream =
+ AddSendStream(cricket::StreamParams::CreateLegacy(1));
+ EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+
+ // Receiver side.
+ FakeVideoReceiveStream* recv_stream =
+ AddRecvStream(cricket::StreamParams::CreateLegacy(1));
+ EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+
+ // Nack history size should match between sender and receiver.
+ EXPECT_EQ(send_stream->GetConfig().rtp.nack.rtp_history_ms,
+ recv_stream->GetConfig().rtp.nack.rtp_history_ms);
+}
+
+TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) {
+ // Stripping NACK from the send codecs must zero the retransmission history
+ // on both send and receive streams; restoring defaults re-enables it.
+ FakeVideoSendStream* send_stream = AddSendStream();
+ FakeVideoReceiveStream* recv_stream = AddRecvStream();
+
+ EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+ EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+
+ // Verify that NACK is turned off when send(!) codecs without NACK are set.
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8")));
+ EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty());
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ recv_stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_EQ(0, recv_stream->GetConfig().rtp.nack.rtp_history_ms);
+ send_stream = fake_call_->GetVideoSendStreams()[0];
+ EXPECT_EQ(0, send_stream->GetConfig().rtp.nack.rtp_history_ms);
+
+ // Verify that NACK is turned on when setting default codecs since the
+ // default codecs have NACK enabled.
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ recv_stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+ send_stream = fake_call_->GetVideoSendStreams()[0];
+ EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0);
+}
+
+// This test verifies that new frame sizes reconfigures encoders even though not
+// (yet) sending. The purpose of this is to permit encoding as quickly as
+// possible once we start sending. Likely the frames being input are from the
+// same source that will be sent later, which just means that we're ready
+// earlier.
+TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+ channel_->SetSend(false);
+
+ FakeVideoSendStream* stream = AddSendStream();
+
+ // No frames entered: the configured streams still have zero dimensions.
+ std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+ EXPECT_EQ(0u, streams[0].width);
+ EXPECT_EQ(0u, streams[0].height);
+
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+ VideoFormat capture_format = capturer.GetSupportedFormats()->front();
+ EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format));
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ // Frame entered, should be reconfigured to new dimensions.
+ streams = stream->GetVideoStreams();
+ EXPECT_EQ(capture_format.width, streams[0].width);
+ EXPECT_EQ(capture_format.height, streams[0].height);
+
+ // Detach the capturer before it goes out of scope.
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) {
+ // Toggling is_screencast must switch the encoder config between realtime
+ // video (no min-transmit bitrate) and screen content (min-transmit bitrate
+ // from screencast_min_bitrate_kbps), recreating the send stream.
+ static const int kScreenshareMinBitrateKbps = 800;
+ cricket::VideoCodec codec = GetEngineCodec("VP8");
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ AddSendStream();
+
+ cricket::FakeVideoCapturer capturer;
+ VideoOptions min_bitrate_options;
+ min_bitrate_options.screencast_min_bitrate_kbps = kScreenshareMinBitrateKbps;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &min_bitrate_options,
+ &capturer));
+ cricket::VideoFormat capture_format_hd =
+ capturer.GetSupportedFormats()->front();
+ EXPECT_EQ(1280, capture_format_hd.width);
+ EXPECT_EQ(720, capture_format_hd.height);
+ EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format_hd));
+
+ EXPECT_TRUE(channel_->SetSend(true));
+
+ EXPECT_TRUE(capturer.CaptureFrame());
+ ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+ FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+ EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+
+ // Verify non-screencast settings.
+ webrtc::VideoEncoderConfig encoder_config =
+ send_stream->GetEncoderConfig().Copy();
+ EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
+ encoder_config.content_type);
+ std::vector<webrtc::VideoStream> streams = send_stream->GetVideoStreams();
+ EXPECT_EQ(capture_format_hd.width, streams.front().width);
+ EXPECT_EQ(capture_format_hd.height, streams.front().height);
+ EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps)
+ << "Non-screenshare shouldn't use min-transmit bitrate.";
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+ EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+ VideoOptions screencast_options;
+ screencast_options.is_screencast = true;
+ EXPECT_TRUE(
+ channel_->SetVideoSend(last_ssrc_, true, &screencast_options, &capturer));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ // Send stream recreated after option change.
+ ASSERT_EQ(2, fake_call_->GetNumCreatedSendStreams());
+ send_stream = fake_call_->GetVideoSendStreams().front();
+ EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+
+ // Verify screencast settings.
+ encoder_config = send_stream->GetEncoderConfig().Copy();
+ EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
+ encoder_config.content_type);
+ EXPECT_EQ(kScreenshareMinBitrateKbps * 1000,
+ encoder_config.min_transmit_bitrate_bps);
+
+ streams = send_stream->GetVideoStreams();
+ EXPECT_EQ(capture_format_hd.width, streams.front().width);
+ EXPECT_EQ(capture_format_hd.height, streams.front().height);
+ EXPECT_TRUE(streams[0].temporal_layer_thresholds_bps.empty());
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+TEST_F(WebRtcVideoChannelTest,
+       ConferenceModeScreencastConfiguresTemporalLayer) {
+  // In conference mode, a screencast stream must be configured with a single
+  // temporal-layer threshold equal to the default screenshare TL0 bitrate.
+  static const int kConferenceScreencastTemporalBitrateBps =
+      ScreenshareLayerConfig::GetDefault().tl0_bitrate_kbps * 1000;
+  send_parameters_.conference_mode = true;
+  channel_->SetSendParameters(send_parameters_);
+
+  AddSendStream();
+  VideoOptions options;
+  options.is_screencast = true;
+  cricket::FakeVideoCapturer capturer;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+  cricket::VideoFormat capture_format_hd =
+      capturer.GetSupportedFormats()->front();
+  EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format_hd));
+
+  EXPECT_TRUE(channel_->SetSend(true));
+
+  EXPECT_TRUE(capturer.CaptureFrame());
+  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+  // Verify screencast settings. (Previously the encoder config was copied
+  // twice in a row; the redundant first Copy() has been removed.)
+  webrtc::VideoEncoderConfig encoder_config =
+      send_stream->GetEncoderConfig().Copy();
+  EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
+            encoder_config.content_type);
+
+  // A single stream with exactly one temporal-layer threshold is expected.
+  std::vector<webrtc::VideoStream> streams = send_stream->GetVideoStreams();
+  ASSERT_EQ(1u, streams.size());
+  ASSERT_EQ(1u, streams[0].temporal_layer_thresholds_bps.size());
+  EXPECT_EQ(kConferenceScreencastTemporalBitrateBps,
+            streams[0].temporal_layer_thresholds_bps[0]);
+
+  // Detach the capturer before it goes out of scope.
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+TEST_F(WebRtcVideoChannelTest, SuspendBelowMinBitrateDisabledByDefault) {
+  // Suspend-below-min-bitrate must be opt-in (see the MediaConfig test below).
+  EXPECT_FALSE(AddSendStream()->GetConfig().suspend_below_min_bitrate);
+}
+
+TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) {
+ // MediaConfig.video.suspend_below_min_bitrate must be plumbed through to the
+ // send stream config; exercised with the flag both on and off.
+ MediaConfig media_config = GetMediaConfig();
+ media_config.video.suspend_below_min_bitrate = true;
+
+ channel_.reset(
+ engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+ channel_->OnReadyToSend(true);
+
+ channel_->SetSendParameters(send_parameters_);
+
+ FakeVideoSendStream* stream = AddSendStream();
+ EXPECT_TRUE(stream->GetConfig().suspend_below_min_bitrate);
+
+ // Recreate the channel with the flag cleared and verify it stays off.
+ media_config.video.suspend_below_min_bitrate = false;
+ channel_.reset(
+ engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+ channel_->OnReadyToSend(true);
+
+ channel_->SetSendParameters(send_parameters_);
+
+ stream = AddSendStream();
+ EXPECT_FALSE(stream->GetConfig().suspend_below_min_bitrate);
+}
+
+TEST_F(WebRtcVideoChannelTest, Vp8DenoisingEnabledByDefault) {
+  // With no explicit video options, VP8 denoising is on.
+  FakeVideoSendStream* send_stream = AddSendStream();
+  webrtc::VideoCodecVP8 settings;
+  ASSERT_TRUE(send_stream->GetVp8Settings(&settings)) << "No VP8 config set.";
+  EXPECT_TRUE(settings.denoisingOn);
+}
+
+TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) {
+  // Verifies VP8 codec-specific settings (denoising, automatic resize, frame
+  // dropping) across single-stream, simulcast and screencast configurations.
+  // Fixed: size() comparisons now use unsigned literals (3u/1u) to match the
+  // file's convention and avoid signed/unsigned comparison warnings.
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  // Single-stream settings should apply with RTX as well (verifies that we
+  // check number of regular SSRCs and not StreamParams::ssrcs which contains
+  // both RTX and regular SSRCs).
+  FakeVideoSendStream* stream = SetUpSimulcast(false, true);
+
+  cricket::FakeVideoCapturer capturer;
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer.Start(capturer.GetSupportedFormats()->front()));
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+  channel_->SetSend(true);
+
+  EXPECT_TRUE(capturer.CaptureFrame());
+
+  webrtc::VideoCodecVP8 vp8_settings;
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  EXPECT_TRUE(vp8_settings.denoisingOn)
+      << "VP8 denoising should be on by default.";
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, false);
+
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  EXPECT_FALSE(vp8_settings.denoisingOn);
+  EXPECT_TRUE(vp8_settings.automaticResizeOn);
+  EXPECT_TRUE(vp8_settings.frameDroppingOn);
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, true);
+
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  EXPECT_TRUE(vp8_settings.denoisingOn);
+  EXPECT_TRUE(vp8_settings.automaticResizeOn);
+  EXPECT_TRUE(vp8_settings.frameDroppingOn);
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+  stream = SetUpSimulcast(true, false);
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+  channel_->SetSend(true);
+  EXPECT_TRUE(capturer.CaptureFrame());
+
+  EXPECT_EQ(3u, stream->GetVideoStreams().size());
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  // Automatic resize off when using simulcast.
+  EXPECT_FALSE(vp8_settings.automaticResizeOn);
+  EXPECT_TRUE(vp8_settings.frameDroppingOn);
+
+  // In screen-share mode, denoising is forced off and simulcast disabled.
+  VideoOptions options;
+  options.is_screencast = true;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, false);
+
+  EXPECT_EQ(1u, stream->GetVideoStreams().size());
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  EXPECT_FALSE(vp8_settings.denoisingOn);
+  // Resizing and frame dropping always off for screen sharing.
+  EXPECT_FALSE(vp8_settings.automaticResizeOn);
+  EXPECT_FALSE(vp8_settings.frameDroppingOn);
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, true);
+
+  // Even when requested, denoising stays off in screen-share mode.
+  ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set.";
+  EXPECT_FALSE(vp8_settings.denoisingOn);
+  EXPECT_FALSE(vp8_settings.automaticResizeOn);
+  EXPECT_FALSE(vp8_settings.frameDroppingOn);
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Test that setting the same options doesn't result in the encoder being
+// reconfigured.
+TEST_F(WebRtcVideoChannelTest, SetIdenticalOptionsDoesntReconfigureEncoder) {
+ VideoOptions options;
+ cricket::FakeVideoCapturer capturer;
+
+ AddSendStream();
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+ FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+ EXPECT_TRUE(capturer.CaptureFrame());
+ // Expect one reconfiguration at this point, from the initial configuration.
+ EXPECT_EQ(1, send_stream->num_encoder_reconfigurations());
+
+ // Set the options one more time and expect no additional reconfigurations.
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+ EXPECT_EQ(1, send_stream->num_encoder_reconfigurations());
+
+ // Change |options| and expect 2 reconfigurations.
+ options.video_noise_reduction = true;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+ EXPECT_EQ(2, send_stream->num_encoder_reconfigurations());
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Fixture that additionally registers VP9 with the fake encoder factory so
+// that VP9 codec-specific encoder settings can be exercised.
+class Vp9SettingsTest : public WebRtcVideoChannelTest {
+ public:
+ Vp9SettingsTest() : Vp9SettingsTest("") {}
+ explicit Vp9SettingsTest(const char* field_trials)
+ : WebRtcVideoChannelTest(field_trials) {
+ encoder_factory_->AddSupportedVideoCodecType("VP9");
+ }
+ virtual ~Vp9SettingsTest() {}
+
+ protected:
+ void TearDown() override {
+ // Remove references to encoder_factory_ since this will be destroyed
+ // before channel_ and engine_.
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
+ }
+};
+
+TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) {
+  // Verifies VP9 codec-specific settings (denoising, frame dropping) for
+  // regular video and screen share. Fixed: the second screen-share call now
+  // requests denoising *on* (mirroring the VP8 test) to prove that it stays
+  // forced off in screen-share mode; previously it redundantly passed false.
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP9"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = SetUpSimulcast(false, false);
+
+  cricket::FakeVideoCapturer capturer;
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer.Start(capturer.GetSupportedFormats()->front()));
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+  channel_->SetSend(true);
+
+  EXPECT_TRUE(capturer.CaptureFrame());
+
+  webrtc::VideoCodecVP9 vp9_settings;
+  ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+  EXPECT_TRUE(vp9_settings.denoisingOn)
+      << "VP9 denoising should be on by default.";
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, false);
+
+  ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+  EXPECT_FALSE(vp9_settings.denoisingOn);
+  // Frame dropping always on for real time video.
+  EXPECT_TRUE(vp9_settings.frameDroppingOn);
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, true);
+
+  ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+  EXPECT_TRUE(vp9_settings.denoisingOn);
+  EXPECT_TRUE(vp9_settings.frameDroppingOn);
+
+  // In screen-share mode, denoising is forced off.
+  VideoOptions options;
+  options.is_screencast = true;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+
+  stream = SetDenoisingOption(last_ssrc_, &capturer, false);
+
+  ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+  EXPECT_FALSE(vp9_settings.denoisingOn);
+  // Frame dropping always off for screen sharing.
+  EXPECT_FALSE(vp9_settings.frameDroppingOn);
+
+  // Requesting denoising must have no effect while screen sharing.
+  stream = SetDenoisingOption(last_ssrc_, &capturer, true);
+
+  ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+  EXPECT_FALSE(vp9_settings.denoisingOn);
+  EXPECT_FALSE(vp9_settings.frameDroppingOn);
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Helper fixture for field-trial-controlled VP9 SVC configuration.
+// VerifySettings() checks the spatial/temporal layer counts the encoder is
+// configured with after sending one frame.
+class Vp9SettingsTestWithFieldTrial : public Vp9SettingsTest {
+ public:
+ explicit Vp9SettingsTestWithFieldTrial(const char* field_trials)
+ : Vp9SettingsTest(field_trials) {}
+
+ protected:
+ void VerifySettings(int num_spatial_layers, int num_temporal_layers) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP9"));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+ FakeVideoSendStream* stream = SetUpSimulcast(false, false);
+
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+ channel_->SetSend(true);
+
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ webrtc::VideoCodecVP9 vp9_settings;
+ ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set.";
+ EXPECT_EQ(num_spatial_layers, vp9_settings.numberOfSpatialLayers);
+ EXPECT_EQ(num_temporal_layers, vp9_settings.numberOfTemporalLayers);
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+ }
+};
+
+// No field trial set at all.
+class Vp9SettingsTestWithNoFlag : public Vp9SettingsTestWithFieldTrial {
+ public:
+ Vp9SettingsTestWithNoFlag() : Vp9SettingsTestWithFieldTrial("") {}
+};
+
+TEST_F(Vp9SettingsTestWithNoFlag, VerifySettings) {
+  // Without the SVC field trial, one spatial and one temporal layer is used.
+  VerifySettings(/*num_spatial_layers=*/1, /*num_temporal_layers=*/1);
+}
+
+// Field trial present but with an unrecognized ("Default") group value.
+class Vp9SettingsTestWithInvalidFlag : public Vp9SettingsTestWithFieldTrial {
+ public:
+ Vp9SettingsTestWithInvalidFlag()
+ : Vp9SettingsTestWithFieldTrial("WebRTC-SupportVP9SVC/Default/") {}
+};
+
+TEST_F(Vp9SettingsTestWithInvalidFlag, VerifySettings) {
+  // An unrecognized trial value behaves like no trial: single layer each.
+  VerifySettings(/*num_spatial_layers=*/1, /*num_temporal_layers=*/1);
+}
+
+// Field trial requesting 2 spatial and 3 temporal layers.
+class Vp9SettingsTestWith2SL3TLFlag : public Vp9SettingsTestWithFieldTrial {
+ public:
+ Vp9SettingsTestWith2SL3TLFlag()
+ : Vp9SettingsTestWithFieldTrial(
+ "WebRTC-SupportVP9SVC/EnabledByFlag_2SL3TL/") {}
+};
+
+TEST_F(Vp9SettingsTestWith2SL3TLFlag, VerifySettings) {
+  // The 2SL3TL trial group must yield 2 spatial and 3 temporal layers.
+  VerifySettings(/*num_spatial_layers=*/2, /*num_temporal_layers=*/3);
+}
+
+TEST_F(WebRtcVideoChannelTest, VerifyMinBitrate) {
+  // A plain send stream uses the global minimum video bitrate.
+  std::vector<webrtc::VideoStream> video_streams =
+      AddSendStream()->GetVideoStreams();
+  ASSERT_EQ(1u, video_streams.size());
+  EXPECT_EQ(cricket::kMinVideoBitrateBps, video_streams[0].min_bitrate_bps);
+}
+
+TEST_F(WebRtcVideoChannelTest, VerifyMinBitrateWithForcedFallbackFieldTrial) {
+  // The forced-fallback field trial overrides the stream minimum bitrate
+  // with the last value encoded in the trial string (34567 bps).
+  webrtc::test::ScopedFieldTrials override_field_trials_(
+      "WebRTC-VP8-Forced-Fallback-Encoder-v2/Enabled-1,2,34567/");
+  std::vector<webrtc::VideoStream> video_streams =
+      AddSendStream()->GetVideoStreams();
+  ASSERT_EQ(1u, video_streams.size());
+  EXPECT_EQ(34567, video_streams[0].min_bitrate_bps);
+}
+
+TEST_F(WebRtcVideoChannelTest,
+       BalancedDegradationPreferenceNotSupportedWithoutFieldtrial) {
+  // With the balanced-degradation trial disabled, only resolution scaling
+  // (not fps scaling) is expected.
+  webrtc::test::ScopedFieldTrials override_field_trials_(
+      "WebRTC-Video-BalancedDegradation/Disabled/");
+  TestDegradationPreference(/*resolution_scaling_enabled=*/true,
+                            /*fps_scaling_enabled=*/false);
+}
+
+TEST_F(WebRtcVideoChannelTest,
+       BalancedDegradationPreferenceSupportedBehindFieldtrial) {
+  // With the balanced-degradation trial enabled, both resolution and fps
+  // scaling are expected.
+  webrtc::test::ScopedFieldTrials override_field_trials_(
+      "WebRTC-Video-BalancedDegradation/Enabled/");
+  TestDegradationPreference(/*resolution_scaling_enabled=*/true,
+                            /*fps_scaling_enabled=*/true);
+}
+
+TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruse) {
+ // Overuse detection enabled, not screensharing — adaptation expected.
+ // (Argument meaning inferred from the sibling tests; see TestCpuAdaptation.)
+ TestCpuAdaptation(true, false);
+}
+
+TEST_F(WebRtcVideoChannelTest, DoesNotAdaptOnOveruseWhenDisabled) {
+ // Overuse detection disabled — no adaptation expected.
+ TestCpuAdaptation(false, false);
+}
+
+TEST_F(WebRtcVideoChannelTest, DoesNotAdaptOnOveruseWhenScreensharing) {
+ // Screensharing — no CPU adaptation expected even with detection enabled.
+ TestCpuAdaptation(true, true);
+}
+
+TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
+ // Drives the sink-wants adaptation path directly: overuse steps resolution
+ // down (observed 3/4 then 2/4 of 1280x720 here), input-resolution changes
+ // are rescaled to the current adaptation level, and underuse steps back up
+ // until full resolution is restored.
+ cricket::VideoCodec codec = GetEngineCodec("VP8");
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(codec);
+
+ MediaConfig media_config = GetMediaConfig();
+ channel_.reset(
+ engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+ channel_->OnReadyToSend(true);
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+ AddSendStream();
+
+ cricket::FakeVideoCapturer capturer;
+ ASSERT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+ ASSERT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ ASSERT_TRUE(channel_->SetSend(true));
+
+ ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+ FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+ EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1280, send_stream->GetLastWidth());
+ EXPECT_EQ(720, send_stream->GetLastHeight());
+
+ // Trigger overuse.
+ rtc::VideoSinkWants wants;
+ wants.max_pixel_count =
+ send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
+ send_stream->InjectVideoSinkWants(wants);
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+ EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
+ EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
+
+ // Trigger overuse again.
+ wants.max_pixel_count =
+ send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
+ send_stream->InjectVideoSinkWants(wants);
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+ EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1280 * 2 / 4, send_stream->GetLastWidth());
+ EXPECT_EQ(720 * 2 / 4, send_stream->GetLastHeight());
+
+ // Change input resolution: the new size is scaled by the current
+ // adaptation factor (1/2).
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
+ EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1284 / 2, send_stream->GetLastWidth());
+ EXPECT_EQ(724 / 2, send_stream->GetLastHeight());
+
+ // Trigger underuse which should go back up in resolution.
+ int current_pixel_count =
+ send_stream->GetLastWidth() * send_stream->GetLastHeight();
+ // Cap the max to 4x the pixel count (assuming max 1/2 x 1/2 scale downs)
+ // of the current stream, so we don't take too large steps.
+ wants.max_pixel_count = current_pixel_count * 4;
+ // Default step down is 3/5 pixel count, so go up by 5/3.
+ wants.target_pixel_count = (current_pixel_count * 5 / 3);
+ send_stream->InjectVideoSinkWants(wants);
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
+ EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1284 * 3 / 4, send_stream->GetLastWidth());
+ EXPECT_EQ(724 * 3 / 4, send_stream->GetLastHeight());
+
+ // Trigger underuse again, should go back up to full resolution.
+ current_pixel_count =
+ send_stream->GetLastWidth() * send_stream->GetLastHeight();
+ wants.max_pixel_count = current_pixel_count * 4;
+ wants.target_pixel_count = (current_pixel_count * 5 / 3);
+ send_stream->InjectVideoSinkWants(wants);
+ EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
+ EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
+ EXPECT_EQ(1284, send_stream->GetLastWidth());
+ EXPECT_EQ(724, send_stream->GetLastHeight());
+
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Verifies that CPU-induced resolution adaptation applied to a camera source
+// does not carry over when the send source is switched to screenshare, and
+// that re-injected sink wants apply again after switching back to the camera.
+TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) {
+  cricket::VideoCodec codec = GetEngineCodec("VP8");
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(codec);
+
+  MediaConfig media_config = GetMediaConfig();
+  media_config.video.enable_cpu_overuse_detection = true;
+  channel_.reset(
+      engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+  channel_->OnReadyToSend(true);
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  AddSendStream();
+
+  cricket::FakeVideoCapturer capturer;
+  ASSERT_EQ(cricket::CS_RUNNING,
+            capturer.Start(capturer.GetSupportedFormats()->front()));
+  ASSERT_TRUE(channel_->SetSend(true));
+  cricket::VideoOptions camera_options;
+  camera_options.is_screencast = false;
+  channel_->SetVideoSend(last_ssrc_, true /* enable */, &camera_options,
+                         &capturer);
+
+  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(1280, send_stream->GetLastWidth());
+  EXPECT_EQ(720, send_stream->GetLastHeight());
+
+  // Trigger overuse.
+  // max_pixel_count just below the current frame size forces a downscale.
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count =
+      send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
+  send_stream->InjectVideoSinkWants(wants);
+  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+  EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
+  // One adaptation step scales each dimension by 3/4.
+  EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
+  EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
+
+  // Switch to screen share. Expect no CPU adaptation.
+  cricket::FakeVideoCapturer screen_share(true);
+  ASSERT_EQ(cricket::CS_RUNNING,
+            screen_share.Start(screen_share.GetSupportedFormats()->front()));
+  cricket::VideoOptions screenshare_options;
+  screenshare_options.is_screencast = true;
+  channel_->SetVideoSend(last_ssrc_, true /* enable */, &screenshare_options,
+                         &screen_share);
+  EXPECT_TRUE(screen_share.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
+  // Switching content type recreates the underlying send stream.
+  ASSERT_EQ(2, fake_call_->GetNumCreatedSendStreams());
+  send_stream = fake_call_->GetVideoSendStreams().front();
+  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(1284, send_stream->GetLastWidth());
+  EXPECT_EQ(724, send_stream->GetLastHeight());
+
+  // Switch back to the normal capturer. Expect the frame to be CPU adapted.
+  channel_->SetVideoSend(last_ssrc_, true /* enable */, &camera_options,
+                         &capturer);
+  send_stream = fake_call_->GetVideoSendStreams().front();
+  // We have a new fake send stream, so it doesn't remember the old sink wants.
+  // In practice, it will be populated from
+  // VideoStreamEncoder::VideoSourceProxy::SetSource(), so simulate that here.
+  send_stream->InjectVideoSinkWants(wants);
+  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
+  ASSERT_EQ(3, fake_call_->GetNumCreatedSendStreams());
+  send_stream = fake_call_->GetVideoSendStreams().front();
+  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
+  EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// TODO(asapersson): Remove this test when the balanced field trial is removed.
+// Helper: sets up a sending channel with CPU overuse detection enabled and
+// checks that the send stream reports the expected degradation preference
+// (whether resolution and/or framerate scaling is enabled).
+void WebRtcVideoChannelTest::TestDegradationPreference(
+    bool resolution_scaling_enabled,
+    bool fps_scaling_enabled) {
+  cricket::VideoCodec codec = GetEngineCodec("VP8");
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(codec);
+
+  MediaConfig media_config = GetMediaConfig();
+  media_config.video.enable_cpu_overuse_detection = true;
+  channel_.reset(
+      engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+  channel_->OnReadyToSend(true);
+
+  EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+  AddSendStream();
+
+  cricket::FakeVideoCapturer capturer;
+  VideoOptions options;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+  cricket::VideoFormat capture_format = capturer.GetSupportedFormats()->front();
+  EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format));
+
+  EXPECT_TRUE(channel_->SetSend(true));
+
+  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+  EXPECT_EQ(resolution_scaling_enabled,
+            send_stream->resolution_scaling_enabled());
+  EXPECT_EQ(fps_scaling_enabled, send_stream->framerate_scaling_enabled());
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Helper: exercises CPU overuse/underuse adaptation. For screenshare the
+// adaptation is expected to reduce framerate (frames dropped); for camera
+// content it is expected to reduce resolution. With overuse detection
+// disabled, no adaptation should occur at all.
+void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse,
+                                               bool is_screenshare) {
+  const int kDefaultFps = 30;
+  cricket::VideoCodec codec = GetEngineCodec("VP8");
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(codec);
+
+  MediaConfig media_config = GetMediaConfig();
+  if (enable_overuse) {
+    media_config.video.enable_cpu_overuse_detection = true;
+  }
+  channel_.reset(
+      engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
+  channel_->OnReadyToSend(true);
+
+  EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+  AddSendStream();
+
+  cricket::FakeVideoCapturer capturer;
+  VideoOptions options;
+  options.is_screencast = is_screenshare;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
+  cricket::VideoFormat capture_format = capturer.GetSupportedFormats()->front();
+  capture_format.interval = rtc::kNumNanosecsPerSec / kDefaultFps;
+  EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format));
+
+  EXPECT_TRUE(channel_->SetSend(true));
+
+  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
+
+  if (!enable_overuse) {
+    EXPECT_FALSE(send_stream->resolution_scaling_enabled());
+    EXPECT_FALSE(send_stream->framerate_scaling_enabled());
+    // NOTE(review): the EXPECT_FALSE above and the EXPECT_EQ below contradict
+    // each other whenever |is_screenshare| is true — both cannot pass in that
+    // case. Confirm which expectation is intended and remove the other.
+    EXPECT_EQ(is_screenshare, send_stream->framerate_scaling_enabled());
+
+    EXPECT_TRUE(capturer.CaptureFrame());
+    EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
+
+    // With adaptation disabled the frame must pass through at full size.
+    EXPECT_EQ(capture_format.width, send_stream->GetLastWidth());
+    EXPECT_EQ(capture_format.height, send_stream->GetLastHeight());
+
+    EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+    return;
+  }
+
+  if (is_screenshare) {
+    EXPECT_FALSE(send_stream->resolution_scaling_enabled());
+    EXPECT_TRUE(send_stream->framerate_scaling_enabled());
+  } else {
+    EXPECT_TRUE(send_stream->resolution_scaling_enabled());
+    EXPECT_FALSE(send_stream->framerate_scaling_enabled());
+  }
+
+  // Trigger overuse.
+  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+
+  rtc::VideoSinkWants wants;
+  if (is_screenshare) {
+    // Screenshare adapts by lowering the allowed framerate.
+    wants.max_framerate_fps = (kDefaultFps * 2) / 3;
+  } else {
+    // Camera content adapts by lowering resolution.
+    wants.max_pixel_count = capture_format.width * capture_format.height - 1;
+  }
+  send_stream->InjectVideoSinkWants(wants);
+
+  for (int i = 0; i < kDefaultFps; ++i)
+    EXPECT_TRUE(capturer.CaptureFrame());
+
+  if (is_screenshare) {
+    // Drops every third frame.
+    EXPECT_EQ(kDefaultFps * 2 / 3, send_stream->GetNumberOfSwappedFrames());
+    EXPECT_EQ(send_stream->GetLastWidth(), capture_format.width);
+    EXPECT_EQ(send_stream->GetLastHeight(), capture_format.height);
+  } else {
+    EXPECT_EQ(kDefaultFps, send_stream->GetNumberOfSwappedFrames());
+    EXPECT_LT(send_stream->GetLastWidth(), capture_format.width);
+    EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
+  }
+
+  // Trigger underuse which should go back to normal resolution.
+  int last_pixel_count =
+      send_stream->GetLastWidth() * send_stream->GetLastHeight();
+  if (is_screenshare) {
+    wants.max_framerate_fps = kDefaultFps;
+  } else {
+    wants.max_pixel_count = last_pixel_count * 4;
+    wants.target_pixel_count.emplace((last_pixel_count * 5) / 3);
+  }
+  send_stream->InjectVideoSinkWants(wants);
+
+  for (int i = 0; i < kDefaultFps; ++i)
+    EXPECT_TRUE(capturer.CaptureFrame());
+
+  if (is_screenshare) {
+    EXPECT_EQ(kDefaultFps + (kDefaultFps * 2 / 3),
+              send_stream->GetNumberOfSwappedFrames());
+  } else {
+    EXPECT_EQ(kDefaultFps * 2, send_stream->GetNumberOfSwappedFrames());
+  }
+
+  // After underuse the stream is expected back at the full capture size.
+  EXPECT_EQ(capture_format.width, send_stream->GetLastWidth());
+  EXPECT_EQ(capture_format.height, send_stream->GetLastHeight());
+
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Verifies that the receiver deduces the NTP capture start time from injected
+// frames, including across an RTP timestamp wraparound.
+TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) {
+  // Start at last timestamp to verify that wraparounds are estimated correctly.
+  static const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
+  static const int64_t kInitialNtpTimeMs = 1247891230;
+  static const int kFrameOffsetMs = 20;
+  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+
+  FakeVideoReceiveStream* stream = AddRecvStream();
+  cricket::FakeVideoRenderer renderer;
+  EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));
+
+  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
+                                 kInitialTimestamp, 0,
+                                 webrtc::kVideoRotation_0);
+  // Initial NTP time is not available on the first frame, but should still be
+  // able to be estimated.
+  stream->InjectFrame(video_frame);
+
+  EXPECT_EQ(1, renderer.num_rendered_frames());
+
+  // This timestamp is kInitialTimestamp (-1) + kFrameOffsetMs * 90, which
+  // triggers a constant-overflow warning, hence we're calculating it explicitly
+  // here.
+  video_frame.set_timestamp(kFrameOffsetMs * 90 - 1);
+  video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs);
+  stream->InjectFrame(video_frame);
+
+  EXPECT_EQ(2, renderer.num_rendered_frames());
+
+  // Verify that NTP time has been correctly deduced.
+  cricket::VideoMediaInfo info;
+  ASSERT_TRUE(channel_->GetStats(&info));
+  ASSERT_EQ(1u, info.receivers.size());
+  EXPECT_EQ(kInitialNtpTimeMs, info.receivers[0].capture_start_ntp_time_ms);
+}
+
+// Verifies the default send codec plus the default NACK/ULPFEC/RED and RTX
+// payload-type wiring in the resulting send stream config.
+TEST_F(WebRtcVideoChannelTest, SetDefaultSendCodecs) {
+  ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
+
+  VideoCodec codec;
+  EXPECT_TRUE(channel_->GetSendCodec(&codec));
+  EXPECT_TRUE(codec.Matches(engine_.codecs()[0]));
+
+  // Using a RTX setup to verify that the default RTX payload type is good.
+  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1);
+  const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);
+  FakeVideoSendStream* stream = AddSendStream(
+      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  // Make sure NACK and FEC are enabled on the correct payload types.
+  EXPECT_EQ(1000, config.rtp.nack.rtp_history_ms);
+  EXPECT_EQ(GetEngineCodec("ulpfec").id, config.rtp.ulpfec.ulpfec_payload_type);
+  EXPECT_EQ(GetEngineCodec("red").id, config.rtp.ulpfec.red_payload_type);
+
+  EXPECT_EQ(1u, config.rtp.rtx.ssrcs.size());
+  EXPECT_EQ(kRtxSsrcs1[0], config.rtp.rtx.ssrcs[0]);
+  VerifySendStreamHasRtxTypes(config, default_apt_rtx_types_);
+  // TODO(juberti): Check RTCP, PLI, TMMBR.
+}
+
+// The following four tests ensures that FlexFEC is not activated by default
+// when the field trials are not enabled.
+// TODO(brandtr): Remove or update these tests when FlexFEC _is_ enabled by
+// default.
+// Without the field trial, the send config must not carry a FlexFEC payload
+// type, SSRC, or protected media SSRCs.
+TEST_F(WebRtcVideoChannelTest,
+       FlexfecSendCodecWithoutSsrcNotExposedByDefault) {
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(0U, config.rtp.flexfec.ssrc);
+  EXPECT_TRUE(config.rtp.flexfec.protected_media_ssrcs.empty());
+}
+
+// Same as above, but with a FlexFEC SSRC explicitly signaled in the stream
+// params — still no FlexFEC without the field trial.
+TEST_F(WebRtcVideoChannelTest, FlexfecSendCodecWithSsrcNotExposedByDefault) {
+  FakeVideoSendStream* stream = AddSendStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(0U, config.rtp.flexfec.ssrc);
+  EXPECT_TRUE(config.rtp.flexfec.protected_media_ssrcs.empty());
+}
+
+// Receive side: no FlexfecReceiveStream is created without the field trial.
+TEST_F(WebRtcVideoChannelTest,
+       FlexfecRecvCodecWithoutSsrcNotExposedByDefault) {
+  AddRecvStream();
+
+  const std::vector<FakeFlexfecReceiveStream*>& streams =
+      fake_call_->GetFlexfecReceiveStreams();
+  EXPECT_TRUE(streams.empty());
+}
+
+// Receive side with a signaled FlexFEC SSRC: still no FlexfecReceiveStream.
+TEST_F(WebRtcVideoChannelTest, FlexfecRecvCodecWithSsrcNotExposedByDefault) {
+  AddRecvStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+
+  const std::vector<FakeFlexfecReceiveStream*>& streams =
+      fake_call_->GetFlexfecReceiveStreams();
+  EXPECT_TRUE(streams.empty());
+}
+
+// TODO(brandtr): When FlexFEC is no longer behind a field trial, merge all
+// tests that use this test fixture into the corresponding "non-field trial"
+// tests.
+// Fixture that enables the FlexFEC-03 "Advertised" field trial for the
+// receive-side tests below.
+class WebRtcVideoChannelFlexfecRecvTest : public WebRtcVideoChannelTest {
+ public:
+  WebRtcVideoChannelFlexfecRecvTest()
+      : WebRtcVideoChannelTest("WebRTC-FlexFEC-03-Advertised/Enabled/") {}
+};
+
+// The advertised flexfec-03 codec must carry transport-cc and REMB feedback.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+       DefaultFlexfecCodecHasTransportCcAndRembFeedbackParam) {
+  EXPECT_TRUE(cricket::HasTransportCc(GetEngineCodec("flexfec-03")));
+  EXPECT_TRUE(cricket::HasRemb(GetEngineCodec("flexfec-03")));
+}
+
+// Without a signaled FlexFEC SSRC, no FlexfecReceiveStream is created even
+// with the field trial enabled.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithoutSsrc) {
+  AddRecvStream();
+
+  const std::vector<FakeFlexfecReceiveStream*>& streams =
+      fake_call_->GetFlexfecReceiveStreams();
+  EXPECT_TRUE(streams.empty());
+}
+
+// With a signaled FlexFEC SSRC, a FlexfecReceiveStream is created with the
+// right payload type, remote SSRC and protected media SSRC, and the video
+// receive stream is marked as protected by FlexFEC.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) {
+  AddRecvStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+
+  const std::vector<FakeFlexfecReceiveStream*>& streams =
+      fake_call_->GetFlexfecReceiveStreams();
+  ASSERT_EQ(1U, streams.size());
+  const FakeFlexfecReceiveStream* stream = streams.front();
+  const webrtc::FlexfecReceiveStream::Config& config = stream->GetConfig();
+  EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.payload_type);
+  EXPECT_EQ(kFlexfecSsrc, config.remote_ssrc);
+  ASSERT_EQ(1U, config.protected_media_ssrcs.size());
+  EXPECT_EQ(kSsrcs1[0], config.protected_media_ssrcs[0]);
+
+  const std::vector<FakeVideoReceiveStream*>& video_streams =
+      fake_call_->GetVideoReceiveStreams();
+  ASSERT_EQ(1U, video_streams.size());
+  const webrtc::VideoReceiveStream::Config& video_config =
+      video_streams.front()->GetConfig();
+  EXPECT_TRUE(video_config.rtp.protected_by_flexfec);
+}
+
+// Enabling FlexFEC via SetRecvParameters must add a FlexfecReceiveStream
+// without tearing down and recreating the existing VideoReceiveStream.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+       EnablingFlexfecDoesNotRecreateVideoReceiveStream) {
+  cricket::VideoRecvParameters recv_parameters;
+  recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+  ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+
+  AddRecvStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
+  EXPECT_EQ(1U, fake_call_->GetVideoReceiveStreams().size());
+
+  // Enable FlexFEC.
+  recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+  EXPECT_EQ(2, fake_call_->GetNumCreatedReceiveStreams())
+      << "Enabling FlexFEC should create FlexfecReceiveStream.";
+  EXPECT_EQ(1U, fake_call_->GetVideoReceiveStreams().size())
+      << "Enabling FlexFEC should not create VideoReceiveStream.";
+  EXPECT_EQ(1U, fake_call_->GetFlexfecReceiveStreams().size())
+      << "Enabling FlexFEC should create a single FlexfecReceiveStream.";
+}
+
+// Disabling FlexFEC must destroy only the FlexfecReceiveStream; the
+// VideoReceiveStream stays intact (creation counter unchanged).
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+       DisablingFlexfecDoesNotRecreateVideoReceiveStream) {
+  cricket::VideoRecvParameters recv_parameters;
+  recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+  recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+
+  AddRecvStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  EXPECT_EQ(2, fake_call_->GetNumCreatedReceiveStreams());
+  EXPECT_EQ(1U, fake_call_->GetVideoReceiveStreams().size());
+  EXPECT_EQ(1U, fake_call_->GetFlexfecReceiveStreams().size());
+
+  // Disable FlexFEC.
+  recv_parameters.codecs.clear();
+  recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+  ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+  EXPECT_EQ(2, fake_call_->GetNumCreatedReceiveStreams())
+      << "Disabling FlexFEC should not recreate VideoReceiveStream.";
+  EXPECT_EQ(1U, fake_call_->GetVideoReceiveStreams().size())
+      << "Disabling FlexFEC should not destroy VideoReceiveStream.";
+  EXPECT_TRUE(fake_call_->GetFlexfecReceiveStreams().empty())
+      << "Disabling FlexFEC should destroy FlexfecReceiveStream.";
+}
+
+// TODO(brandtr): When FlexFEC is no longer behind a field trial, merge all
+// tests that use this test fixture into the corresponding "non-field trial"
+// tests.
+// Fixture that enables FlexFEC-03 for both sending and receiving.
+class WebRtcVideoChannelFlexfecSendRecvTest : public WebRtcVideoChannelTest {
+ public:
+  WebRtcVideoChannelFlexfecSendRecvTest()
+      : WebRtcVideoChannelTest(
+            "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/") {
+  }
+};
+
+// With the send field trial on but no FlexFEC SSRC signaled, the payload type
+// is configured but no SSRC/protected SSRCs are set.
+TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
+       SetDefaultSendCodecsWithoutSsrc) {
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(0U, config.rtp.flexfec.ssrc);
+  EXPECT_TRUE(config.rtp.flexfec.protected_media_ssrcs.empty());
+}
+
+// With a signaled FlexFEC SSRC, the full send-side FlexFEC config is wired up.
+TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithSsrc) {
+  FakeVideoSendStream* stream = AddSendStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(kFlexfecSsrc, config.rtp.flexfec.ssrc);
+  ASSERT_EQ(1U, config.rtp.flexfec.protected_media_ssrcs.size());
+  EXPECT_EQ(kSsrcs1[0], config.rtp.flexfec.protected_media_ssrcs[0]);
+}
+
+// Sending only VP8 (no FEC codecs) must leave ULPFEC/RED unconfigured.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.ulpfec.ulpfec_payload_type);
+  EXPECT_EQ(-1, config.rtp.ulpfec.red_payload_type);
+}
+
+// Same as above for FlexFEC: no flexfec codec negotiated means no payload
+// type configured, even with the field trial on.
+TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetSendCodecsWithoutFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type);
+}
+
+// Verifies that negotiating flexfec-03 on the receive side produces a
+// FlexfecReceiveStream whose config mirrors the video receive stream's
+// SSRC, RTCP mode/transport, transport-cc setting and header extensions.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) {
+  AddRecvStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+
+  cricket::VideoRecvParameters recv_parameters;
+  recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+  recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+
+  const std::vector<FakeFlexfecReceiveStream*>& flexfec_streams =
+      fake_call_->GetFlexfecReceiveStreams();
+  ASSERT_EQ(1U, flexfec_streams.size());
+  const FakeFlexfecReceiveStream* flexfec_stream = flexfec_streams.front();
+  const webrtc::FlexfecReceiveStream::Config& flexfec_stream_config =
+      flexfec_stream->GetConfig();
+  EXPECT_EQ(GetEngineCodec("flexfec-03").id,
+            flexfec_stream_config.payload_type);
+  EXPECT_EQ(kFlexfecSsrc, flexfec_stream_config.remote_ssrc);
+  ASSERT_EQ(1U, flexfec_stream_config.protected_media_ssrcs.size());
+  EXPECT_EQ(kSsrcs1[0], flexfec_stream_config.protected_media_ssrcs[0]);
+  const std::vector<FakeVideoReceiveStream*>& video_streams =
+      fake_call_->GetVideoReceiveStreams();
+  const FakeVideoReceiveStream* video_stream = video_streams.front();
+  const webrtc::VideoReceiveStream::Config& video_stream_config =
+      video_stream->GetConfig();
+  EXPECT_EQ(video_stream_config.rtp.local_ssrc,
+            flexfec_stream_config.local_ssrc);
+  EXPECT_EQ(video_stream_config.rtp.rtcp_mode, flexfec_stream_config.rtcp_mode);
+  EXPECT_EQ(video_stream_config.rtcp_send_transport,
+            flexfec_stream_config.rtcp_send_transport);
+  // TODO(brandtr): Update this EXPECT when we set |transport_cc| in a
+  // spec-compliant way.
+  EXPECT_EQ(video_stream_config.rtp.transport_cc,
+            flexfec_stream_config.transport_cc);
+  // NOTE(review): this rtcp_mode expectation duplicates the one a few lines
+  // above — one of the two can likely be removed.
+  EXPECT_EQ(video_stream_config.rtp.rtcp_mode, flexfec_stream_config.rtcp_mode);
+  EXPECT_EQ(video_stream_config.rtp.extensions,
+            flexfec_stream_config.rtp_header_extensions);
+}
+
+// We should not send FlexFEC, even if we advertise it, unless the right
+// field trial is set.
+// TODO(brandtr): Remove when FlexFEC is enabled by default.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+       SetSendCodecsWithoutSsrcWithFecDoesNotEnableFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(0, config.rtp.flexfec.ssrc);
+  EXPECT_TRUE(config.rtp.flexfec.protected_media_ssrcs.empty());
+}
+
+// As above, but with a FlexFEC SSRC present in the stream params; the
+// recv-only field trial must still not enable sending FlexFEC.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+       SetSendCodecsWithSsrcWithFecDoesNotEnableFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(0, config.rtp.flexfec.ssrc);
+  EXPECT_TRUE(config.rtp.flexfec.protected_media_ssrcs.empty());
+}
+
+// An RTX codec with no "apt" (associated payload type) parameter must be
+// rejected by SetSendParameters.
+TEST_F(WebRtcVideoChannelTest,
+       SetSendCodecRejectsRtxWithoutAssociatedPayloadType) {
+  const int kUnusedPayloadType = 127;
+  EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType));
+
+  cricket::VideoSendParameters parameters;
+  cricket::VideoCodec rtx_codec(kUnusedPayloadType, "rtx");
+  parameters.codecs.push_back(rtx_codec);
+  EXPECT_FALSE(channel_->SetSendParameters(parameters))
+      << "RTX codec without associated payload type should be rejected.";
+}
+
+// An RTX codec whose "apt" points at a payload type not present in the codec
+// list must be rejected; one pointing at VP8 is accepted.
+TEST_F(WebRtcVideoChannelTest,
+       SetSendCodecRejectsRtxWithoutMatchingVideoCodec) {
+  const int kUnusedPayloadType1 = 126;
+  const int kUnusedPayloadType2 = 127;
+  EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType1));
+  EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType2));
+  {
+    // Valid case: RTX associated with the engine's VP8 payload type.
+    cricket::VideoCodec rtx_codec = cricket::VideoCodec::CreateRtxCodec(
+        kUnusedPayloadType1, GetEngineCodec("VP8").id);
+    cricket::VideoSendParameters parameters;
+    parameters.codecs.push_back(GetEngineCodec("VP8"));
+    parameters.codecs.push_back(rtx_codec);
+    ASSERT_TRUE(channel_->SetSendParameters(parameters));
+  }
+  {
+    // Invalid case: RTX associated with a payload type that isn't offered.
+    cricket::VideoCodec rtx_codec = cricket::VideoCodec::CreateRtxCodec(
+        kUnusedPayloadType1, kUnusedPayloadType2);
+    cricket::VideoSendParameters parameters;
+    parameters.codecs.push_back(GetEngineCodec("VP8"));
+    parameters.codecs.push_back(rtx_codec);
+    EXPECT_FALSE(channel_->SetSendParameters(parameters))
+        << "RTX without matching video codec should be rejected.";
+  }
+}
+
+// Changing the RTX payload type via SetSendParameters must be reflected in
+// the send stream config while the RTX SSRC mapping is preserved.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) {
+  const int kUnusedPayloadType1 = 126;
+  const int kUnusedPayloadType2 = 127;
+  EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType1));
+  EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType2));
+
+  // SSRCs for RTX.
+  cricket::StreamParams params =
+      cricket::StreamParams::CreateLegacy(kSsrcs1[0]);
+  params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]);
+  AddSendStream(params);
+
+  // Original payload type for RTX.
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  cricket::VideoCodec rtx_codec(kUnusedPayloadType1, "rtx");
+  rtx_codec.SetParam("apt", GetEngineCodec("VP8").id);
+  parameters.codecs.push_back(rtx_codec);
+  EXPECT_TRUE(channel_->SetSendParameters(parameters));
+  ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size());
+  const webrtc::VideoSendStream::Config& config_before =
+      fake_call_->GetVideoSendStreams()[0]->GetConfig();
+  EXPECT_EQ(kUnusedPayloadType1, config_before.rtp.rtx.payload_type);
+  ASSERT_EQ(1U, config_before.rtp.rtx.ssrcs.size());
+  EXPECT_EQ(kRtxSsrcs1[0], config_before.rtp.rtx.ssrcs[0]);
+
+  // Change payload type for RTX.
+  parameters.codecs[1].id = kUnusedPayloadType2;
+  EXPECT_TRUE(channel_->SetSendParameters(parameters));
+  ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size());
+  const webrtc::VideoSendStream::Config& config_after =
+      fake_call_->GetVideoSendStreams()[0]->GetConfig();
+  EXPECT_EQ(kUnusedPayloadType2, config_after.rtp.rtx.payload_type);
+  ASSERT_EQ(1U, config_after.rtp.rtx.ssrcs.size());
+  EXPECT_EQ(kRtxSsrcs1[0], config_after.rtp.rtx.ssrcs[0]);
+}
+
+// Removing ULPFEC from the codec list must disable ULPFEC on the existing
+// send stream.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  parameters.codecs.push_back(GetEngineCodec("ulpfec"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(GetEngineCodec("ulpfec").id, config.rtp.ulpfec.ulpfec_payload_type);
+
+  parameters.codecs.pop_back();
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+  stream = fake_call_->GetVideoSendStreams()[0];
+  ASSERT_TRUE(stream != nullptr);
+  config = stream->GetConfig().Copy();
+  EXPECT_EQ(-1, config.rtp.ulpfec.ulpfec_payload_type)
+      << "SetSendCodec without ULPFEC should disable current ULPFEC.";
+}
+
+// Same check for FlexFEC: dropping flexfec-03 from the codec list disables
+// FlexFEC on the existing send stream.
+TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
+       SetSendCodecsWithoutFecDisablesFec) {
+  cricket::VideoSendParameters parameters;
+  parameters.codecs.push_back(GetEngineCodec("VP8"));
+  parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+  FakeVideoSendStream* stream = AddSendStream(
+      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+
+  EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type);
+  EXPECT_EQ(kFlexfecSsrc, config.rtp.flexfec.ssrc);
+  ASSERT_EQ(1U, config.rtp.flexfec.protected_media_ssrcs.size());
+  EXPECT_EQ(kSsrcs1[0], config.rtp.flexfec.protected_media_ssrcs[0]);
+
+  parameters.codecs.pop_back();
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+  stream = fake_call_->GetVideoSendStreams()[0];
+  ASSERT_TRUE(stream != nullptr);
+  config = stream->GetConfig().Copy();
+  EXPECT_EQ(-1, config.rtp.flexfec.payload_type)
+      << "SetSendCodec without FlexFEC should disable current FlexFEC.";
+}
+
+// Changing a codec parameter (max QP) must be propagated to the already
+// created send stream's encoder config.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsChangesExistingStreams) {
+  cricket::VideoSendParameters parameters;
+  cricket::VideoCodec codec(100, "VP8");
+  codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax);
+  parameters.codecs.push_back(codec);
+
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+  channel_->SetSend(true);
+
+  FakeVideoSendStream* stream = AddSendStream();
+  cricket::FakeVideoCapturer capturer;
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+
+  std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+  EXPECT_EQ(kDefaultQpMax, streams[0].max_qp);
+
+  parameters.codecs.clear();
+  codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax + 1);
+  parameters.codecs.push_back(codec);
+  ASSERT_TRUE(channel_->SetSendParameters(parameters));
+  streams = fake_call_->GetVideoSendStreams()[0]->GetVideoStreams();
+  EXPECT_EQ(kDefaultQpMax + 1, streams[0].max_qp);
+  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Codec-level min/start/max bitrate params (kbps strings) map onto the call
+// bitrate config (bps values).
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithBitrates) {
+  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
+                                     200000);
+}
+
+// A very high codec max bitrate is accepted and reflected in the stream.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithHighMaxBitrate) {
+  SetSendCodecsShouldWorkForBitrates("", 0, "", -1, "10000", 10000000);
+  std::vector<webrtc::VideoStream> streams = AddSendStream()->GetVideoStreams();
+  ASSERT_EQ(1u, streams.size());
+  EXPECT_EQ(10000000, streams[0].max_bitrate_bps);
+}
+
+// Empty bitrate params fall back to the defaults (0 min, -1 start/max).
+TEST_F(WebRtcVideoChannelTest,
+       SetSendCodecsWithoutBitratesUsesCorrectDefaults) {
+  SetSendCodecsShouldWorkForBitrates(
+      "", 0, "", -1, "", -1);
+}
+
+// Negative bitrate params are clamped (min to 0, start to -1).
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsCapsMinAndStartBitrate) {
+  SetSendCodecsShouldWorkForBitrates("-1", 0, "-100", -1, "", -1);
+}
+
+// A codec max bitrate below the min bitrate is an invalid configuration.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectsMaxLessThanMinBitrate) {
+  send_parameters_.codecs[0].params[kCodecParamMinBitrate] = "300";
+  send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "200";
+  EXPECT_FALSE(channel_->SetSendParameters(send_parameters_));
+}
+
+// Test that when both the codec-specific bitrate params and max_bandwidth_bps
+// are present in the same send parameters, the settings are combined correctly.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithBitratesAndMaxSendBandwidth) {
+  send_parameters_.codecs[0].params[kCodecParamMinBitrate] = "100";
+  send_parameters_.codecs[0].params[kCodecParamStartBitrate] = "200";
+  send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "300";
+  send_parameters_.max_bandwidth_bps = 400000;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+  EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+  // We expect max_bandwidth_bps to take priority, if set.
+  EXPECT_EQ(400000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+  // Decrease max_bandwidth_bps.
+  send_parameters_.max_bandwidth_bps = 350000;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+  // Since the codec isn't changing, start_bitrate_bps should be -1.
+  EXPECT_EQ(-1, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+  EXPECT_EQ(350000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+  // Now try again with the values flipped around.
+  send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "400";
+  send_parameters_.max_bandwidth_bps = 300000;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+  EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+  EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+  // If we change the codec max, max_bandwidth_bps should still apply.
+  send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "350";
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+  EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+  EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+}
+
+// Setting only max_bandwidth_bps must leave previously configured min/start
+// bitrates untouched.
+TEST_F(WebRtcVideoChannelTest,
+       SetMaxSendBandwidthShouldPreserveOtherBitrates) {
+  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
+                                     200000);
+  send_parameters_.max_bandwidth_bps = 300000;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps)
+      << "Setting max bitrate should keep previous min bitrate.";
+  EXPECT_EQ(-1, fake_call_->GetConfig().bitrate_config.start_bitrate_bps)
+      << "Setting max bitrate should not reset start bitrate.";
+  EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+}
+
+// Setting max_bandwidth_bps to -1 removes the cap again.
+TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthShouldBeRemovable) {
+  send_parameters_.max_bandwidth_bps = 300000;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+  // -1 means to disable max bitrate (set infinite).
+  send_parameters_.max_bandwidth_bps = -1;
+  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(-1, fake_call_->GetConfig().bitrate_config.max_bitrate_bps)
+      << "Setting zero max bitrate did not reset start bitrate.";
+}
+
+// max_bandwidth_bps applies to both the call-level bitrate config and the
+// per-stream max, including for a stream added before the parameters are set.
+TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthAndAddSendStream) {
+  send_parameters_.max_bandwidth_bps = 99999;
+  FakeVideoSendStream* stream = AddSendStream();
+  ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(send_parameters_.max_bandwidth_bps,
+            fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+  ASSERT_EQ(1u, stream->GetVideoStreams().size());
+  EXPECT_EQ(send_parameters_.max_bandwidth_bps,
+            stream->GetVideoStreams()[0].max_bitrate_bps);
+
+  send_parameters_.max_bandwidth_bps = 77777;
+  ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(send_parameters_.max_bandwidth_bps,
+            fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+  EXPECT_EQ(send_parameters_.max_bandwidth_bps,
+            stream->GetVideoStreams()[0].max_bitrate_bps);
+}
+
+// Checks that raising max_bandwidth_bps above the encoder's current max lets
+// the single-stream sender's max bitrate grow accordingly, once a frame has
+// been captured to trigger encoder reconfiguration.
+TEST_F(WebRtcVideoChannelTest, SetMaxSendBitrateCanIncreaseSenderBitrate) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+ channel_->SetSend(true);
+
+ FakeVideoSendStream* stream = AddSendStream();
+
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+
+ std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+ int initial_max_bitrate_bps = streams[0].max_bitrate_bps;
+ EXPECT_GT(initial_max_bitrate_bps, 0);
+
+ parameters.max_bandwidth_bps = initial_max_bitrate_bps * 2;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ // Insert a frame to update the encoder config.
+ EXPECT_TRUE(capturer.CaptureFrame());
+ streams = stream->GetVideoStreams();
+ EXPECT_EQ(initial_max_bitrate_bps * 2, streams[0].max_bitrate_bps);
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
+}
+
+// Same as above but for a simulcast sender: the doubled max bandwidth must be
+// reflected in the *sum* of per-stream max bitrates (GetTotalMaxBitrateBps).
+TEST_F(WebRtcVideoChannelTest,
+ SetMaxSendBitrateCanIncreaseSimulcastSenderBitrate) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetSendParameters(parameters));
+ channel_->SetSend(true);
+
+ FakeVideoSendStream* stream = AddSendStream(
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
+
+ // Send a frame to make sure this scales up to >1 stream (simulcast).
+ cricket::FakeVideoCapturer capturer;
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], true, nullptr, &capturer));
+ EXPECT_EQ(cricket::CS_RUNNING,
+ capturer.Start(capturer.GetSupportedFormats()->front()));
+ EXPECT_TRUE(capturer.CaptureFrame());
+
+ std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+ ASSERT_GT(streams.size(), 1u)
+ << "Without simulcast this test doesn't make sense.";
+ int initial_max_bitrate_bps = GetTotalMaxBitrateBps(streams);
+ EXPECT_GT(initial_max_bitrate_bps, 0);
+
+ parameters.max_bandwidth_bps = initial_max_bitrate_bps * 2;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ // Insert a frame to update the encoder config.
+ EXPECT_TRUE(capturer.CaptureFrame());
+ streams = stream->GetVideoStreams();
+ int increased_max_bitrate_bps = GetTotalMaxBitrateBps(streams);
+ EXPECT_EQ(initial_max_bitrate_bps * 2, increased_max_bitrate_bps);
+
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], true, nullptr, nullptr));
+}
+
+// Checks that the kCodecParamMaxQuantization codec parameter is applied as the
+// stream's max_qp and is echoed back by GetSendCodec.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithMaxQuantization) {
+ static const char* kMaxQuantization = "21";
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs[0].params[kCodecParamMaxQuantization] = kMaxQuantization;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_EQ(static_cast<unsigned int>(atoi(kMaxQuantization)),
+ AddSendStream()->GetVideoStreams().back().max_qp);
+
+ VideoCodec codec;
+ EXPECT_TRUE(channel_->GetSendCodec(&codec));
+ EXPECT_EQ(kMaxQuantization, codec.params[kCodecParamMaxQuantization]);
+}
+
+// Checks that payload type ids outside 0..127 (-2, -1, 128, 129) are rejected
+// by SetSendParameters.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectBadPayloadTypes) {
+ // TODO(pbos): Should we only allow the dynamic range?
+ static const int kIncorrectPayloads[] = {-2, -1, 128, 129};
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ for (size_t i = 0; i < arraysize(kIncorrectPayloads); ++i) {
+ parameters.codecs[0].id = kIncorrectPayloads[i];
+ EXPECT_FALSE(channel_->SetSendParameters(parameters))
+ << "Bad payload type '" << kIncorrectPayloads[i] << "' accepted.";
+ }
+}
+
+// Checks that payload type ids in the dynamic range are accepted. Note that
+// despite the name, only 96..127 is exercised here, not the full valid range.
+TEST_F(WebRtcVideoChannelTest, SetSendCodecsAcceptAllValidPayloadTypes) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ for (int payload_type = 96; payload_type <= 127; ++payload_type) {
+ parameters.codecs[0].id = payload_type;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters))
+ << "Payload type '" << payload_type << "' rejected.";
+ }
+}
+
+// Test that setting the a different set of codecs but with an identical front
+// codec doesn't result in the stream being recreated.
+// This may happen when a subsequent negotiation includes fewer codecs, as a
+// result of one of the codecs being rejected.
+TEST_F(WebRtcVideoChannelTest,
+ SetSendCodecsIdenticalFirstCodecDoesntRecreateStream) {
+ cricket::VideoSendParameters parameters1;
+ parameters1.codecs.push_back(GetEngineCodec("VP8"));
+ parameters1.codecs.push_back(GetEngineCodec("VP9"));
+ EXPECT_TRUE(channel_->SetSendParameters(parameters1));
+
+ AddSendStream();
+ EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
+
+ cricket::VideoSendParameters parameters2;
+ parameters2.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel_->SetSendParameters(parameters2));
+ EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
+}
+
+// Checks that a receive codec list containing just VP8 is accepted.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithOnlyVp8) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+}
+
+// Test that we set our inbound RTX codecs properly.
+// Rejected cases: RTX without an "apt" parameter, RTX whose "apt" points at an
+// unknown payload type, and RTX whose "apt" points at another RTX codec.
+// Accepted case: RTX whose "apt" is the VP8 payload type.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithRtx) {
+ const int kUnusedPayloadType1 = 126;
+ const int kUnusedPayloadType2 = 127;
+ EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType1));
+ EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType2));
+
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ cricket::VideoCodec rtx_codec(kUnusedPayloadType1, "rtx");
+ parameters.codecs.push_back(rtx_codec);
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters))
+ << "RTX codec without associated payload should be rejected.";
+
+ parameters.codecs[1].SetParam("apt", kUnusedPayloadType2);
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters))
+ << "RTX codec with invalid associated payload type should be rejected.";
+
+ parameters.codecs[1].SetParam("apt", GetEngineCodec("VP8").id);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ cricket::VideoCodec rtx_codec2(kUnusedPayloadType2, "rtx");
+ rtx_codec2.SetParam("apt", rtx_codec.id);
+ parameters.codecs.push_back(rtx_codec2);
+
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters)) <<
+ "RTX codec with another RTX as associated payload type should be "
+ "rejected.";
+}
+
+// Checks that renumbering the RTX payload type updates the receive stream's
+// rtx_associated_payload_types mapping while keeping the RTX SSRC intact.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) {
+ const int kUnusedPayloadType1 = 126;
+ const int kUnusedPayloadType2 = 127;
+ EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType1));
+ EXPECT_FALSE(FindCodecById(engine_.codecs(), kUnusedPayloadType2));
+
+ // SSRCs for RTX.
+ cricket::StreamParams params =
+ cricket::StreamParams::CreateLegacy(kSsrcs1[0]);
+ params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]);
+ AddRecvStream(params);
+
+ // Original payload type for RTX.
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ cricket::VideoCodec rtx_codec(kUnusedPayloadType1, "rtx");
+ rtx_codec.SetParam("apt", GetEngineCodec("VP8").id);
+ parameters.codecs.push_back(rtx_codec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+ ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size());
+ const webrtc::VideoReceiveStream::Config& config_before =
+ fake_call_->GetVideoReceiveStreams()[0]->GetConfig();
+ EXPECT_EQ(1U, config_before.rtp.rtx_associated_payload_types.size());
+ const int* payload_type_before = FindKeyByValue(
+ config_before.rtp.rtx_associated_payload_types, GetEngineCodec("VP8").id);
+ ASSERT_NE(payload_type_before, nullptr);
+ EXPECT_EQ(kUnusedPayloadType1, *payload_type_before);
+ EXPECT_EQ(kRtxSsrcs1[0], config_before.rtp.rtx_ssrc);
+
+ // Change payload type for RTX.
+ parameters.codecs[1].id = kUnusedPayloadType2;
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+ ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size());
+ const webrtc::VideoReceiveStream::Config& config_after =
+ fake_call_->GetVideoReceiveStreams()[0]->GetConfig();
+ EXPECT_EQ(1U, config_after.rtp.rtx_associated_payload_types.size());
+ const int* payload_type_after = FindKeyByValue(
+ config_after.rtp.rtx_associated_payload_types, GetEngineCodec("VP8").id);
+ ASSERT_NE(payload_type_after, nullptr);
+ EXPECT_EQ(kUnusedPayloadType2, *payload_type_after);
+ EXPECT_EQ(kRtxSsrcs1[0], config_after.rtp.rtx_ssrc);
+}
+
+// Checks that a known codec is accepted on a non-default payload type (99).
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsDifferentPayloadType) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs[0].id = 99;
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+}
+
+// Checks that the engine's full default codec list is accepted, and that the
+// first engine codec ends up as the first configured decoder.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptDefaultCodecs) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs = engine_.codecs();
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ const webrtc::VideoReceiveStream::Config& config = stream->GetConfig();
+ EXPECT_EQ(engine_.codecs()[0].name, config.decoders[0].payload_name);
+ EXPECT_EQ(engine_.codecs()[0].id, config.decoders[0].payload_type);
+}
+
+// Checks that an unknown codec name ("WTF3") causes rejection.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectUnsupportedCodec) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(VideoCodec(101, "WTF3"));
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Checks that multiple distinct video codecs (VP8 + VP9) are accepted.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptsMultipleVideoCodecs) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("VP9"));
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+}
+
+// Checks that ULPFEC, once enabled on a receive stream (here via send
+// parameters that include red/ulpfec), is disabled again when new receive
+// parameters omit the FEC codecs (ulpfec_payload_type goes back to -1).
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithoutFecDisablesFec) {
+ cricket::VideoSendParameters send_parameters;
+ send_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ send_parameters.codecs.push_back(GetEngineCodec("red"));
+ send_parameters.codecs.push_back(GetEngineCodec("ulpfec"));
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
+
+ FakeVideoReceiveStream* stream = AddRecvStream();
+
+ EXPECT_EQ(GetEngineCodec("ulpfec").id,
+ stream->GetConfig().rtp.ulpfec_payload_type);
+
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ ASSERT_TRUE(stream != nullptr);
+ EXPECT_EQ(-1, stream->GetConfig().rtp.ulpfec_payload_type)
+ << "SetSendCodec without ULPFEC should disable current ULPFEC.";
+}
+
+// FlexFEC analogue of the test above: setting receive parameters without
+// flexfec-03 must tear down the existing FlexfecReceiveStream.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvParamsWithoutFecDisablesFec) {
+ AddRecvStream(
+ CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+ const std::vector<FakeFlexfecReceiveStream*>& streams =
+ fake_call_->GetFlexfecReceiveStreams();
+
+ ASSERT_EQ(1U, streams.size());
+ const FakeFlexfecReceiveStream* stream = streams.front();
+ EXPECT_EQ(GetEngineCodec("flexfec-03").id, stream->GetConfig().payload_type);
+ EXPECT_EQ(kFlexfecSsrc, stream->GetConfig().remote_ssrc);
+ ASSERT_EQ(1U, stream->GetConfig().protected_media_ssrcs.size());
+ EXPECT_EQ(kSsrcs1[0], stream->GetConfig().protected_media_ssrcs[0]);
+
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+ EXPECT_TRUE(streams.empty())
+ << "SetSendCodec without FlexFEC should disable current FlexFEC.";
+}
+
+// Checks that ULPFEC stays enabled on the receive stream after both
+// SetRecvParameters and SetSendParameters that include red/ulpfec.
+// NOTE(review): despite the name, the first half exercises SetRecvParameters.
+TEST_F(WebRtcVideoChannelTest, SetSendParamsWithFecEnablesFec) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ EXPECT_EQ(GetEngineCodec("ulpfec").id,
+ stream->GetConfig().rtp.ulpfec_payload_type);
+
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ recv_parameters.codecs.push_back(GetEngineCodec("red"));
+ recv_parameters.codecs.push_back(GetEngineCodec("ulpfec"));
+ ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ ASSERT_TRUE(stream != nullptr);
+ EXPECT_EQ(GetEngineCodec("ulpfec").id,
+ stream->GetConfig().rtp.ulpfec_payload_type)
+ << "ULPFEC should be enabled on the receive stream.";
+
+ cricket::VideoSendParameters send_parameters;
+ send_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ send_parameters.codecs.push_back(GetEngineCodec("red"));
+ send_parameters.codecs.push_back(GetEngineCodec("ulpfec"));
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_EQ(GetEngineCodec("ulpfec").id,
+ stream->GetConfig().rtp.ulpfec_payload_type)
+ << "ULPFEC should be enabled on the receive stream.";
+}
+
+// FlexFEC analogue: the FlexfecReceiveStream keeps its payload type, remote
+// SSRC and protected media SSRC after both SetRecvParameters and
+// SetSendParameters that include flexfec-03.
+TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
+ SetSendRecvParamsWithFecEnablesFec) {
+ AddRecvStream(
+ CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+ const std::vector<FakeFlexfecReceiveStream*>& streams =
+ fake_call_->GetFlexfecReceiveStreams();
+
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+ ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+ ASSERT_EQ(1U, streams.size());
+ const FakeFlexfecReceiveStream* stream_with_recv_params = streams.front();
+ EXPECT_EQ(GetEngineCodec("flexfec-03").id,
+ stream_with_recv_params->GetConfig().payload_type);
+ EXPECT_EQ(kFlexfecSsrc, stream_with_recv_params->GetConfig().remote_ssrc);
+ EXPECT_EQ(1U,
+ stream_with_recv_params->GetConfig().protected_media_ssrcs.size());
+ EXPECT_EQ(kSsrcs1[0],
+ stream_with_recv_params->GetConfig().protected_media_ssrcs[0]);
+
+ cricket::VideoSendParameters send_parameters;
+ send_parameters.codecs.push_back(GetEngineCodec("VP8"));
+ send_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
+ ASSERT_EQ(1U, streams.size());
+ const FakeFlexfecReceiveStream* stream_with_send_params = streams.front();
+ EXPECT_EQ(GetEngineCodec("flexfec-03").id,
+ stream_with_send_params->GetConfig().payload_type);
+ EXPECT_EQ(kFlexfecSsrc, stream_with_send_params->GetConfig().remote_ssrc);
+ EXPECT_EQ(1U,
+ stream_with_send_params->GetConfig().protected_media_ssrcs.size());
+ EXPECT_EQ(kSsrcs1[0],
+ stream_with_send_params->GetConfig().protected_media_ssrcs[0]);
+}
+
+// Checks that a red codec reusing the media codec's payload type is rejected.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateFecPayloads) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("red"));
+ parameters.codecs[1].id = parameters.codecs[0].id;
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Same duplicate-payload-type rejection, with flexfec-03 instead of red.
+TEST_F(WebRtcVideoChannelFlexfecRecvTest,
+ SetRecvCodecsRejectDuplicateFecPayloads) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
+ parameters.codecs[1].id = parameters.codecs[0].id;
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Checks that two different media codecs (VP8, VP9) sharing one payload type
+// are rejected.
+TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateCodecPayloads) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("VP9"));
+ parameters.codecs[1].id = parameters.codecs[0].id;
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Checks that the same codec listed twice on *distinct* payload types is fine.
+TEST_F(WebRtcVideoChannelTest,
+ SetRecvCodecsAcceptSameCodecOnMultiplePayloadTypes) {
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ parameters.codecs[1].id += 1;
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+}
+
+// Test that setting the same codecs but with a different order
+// doesn't result in the stream being recreated.
+TEST_F(WebRtcVideoChannelTest,
+ SetRecvCodecsDifferentOrderDoesntRecreateStream) {
+ cricket::VideoRecvParameters parameters1;
+ parameters1.codecs.push_back(GetEngineCodec("VP8"));
+ parameters1.codecs.push_back(GetEngineCodec("red"));
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters1));
+
+ AddRecvStream(cricket::StreamParams::CreateLegacy(123));
+ EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
+
+ cricket::VideoRecvParameters parameters2;
+ parameters2.codecs.push_back(GetEngineCodec("red"));
+ parameters2.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters2));
+ EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
+}
+
+// Checks the default sending/receiving state of freshly added streams.
+TEST_F(WebRtcVideoChannelTest, SendStreamNotSendingByDefault) {
+ EXPECT_FALSE(AddSendStream()->IsSending());
+}
+
+TEST_F(WebRtcVideoChannelTest, ReceiveStreamReceivingByDefault) {
+ EXPECT_TRUE(AddRecvStream()->IsReceiving());
+}
+
+// Exercises all four SetSend transitions (false->true, true->true,
+// true->false, false->false) and checks that a stream added while the channel
+// is sending starts out in the sending state.
+TEST_F(WebRtcVideoChannelTest, SetSend) {
+ FakeVideoSendStream* stream = AddSendStream();
+ EXPECT_FALSE(stream->IsSending());
+
+ // false->true
+ EXPECT_TRUE(channel_->SetSend(true));
+ EXPECT_TRUE(stream->IsSending());
+ // true->true
+ EXPECT_TRUE(channel_->SetSend(true));
+ EXPECT_TRUE(stream->IsSending());
+ // true->false
+ EXPECT_TRUE(channel_->SetSend(false));
+ EXPECT_FALSE(stream->IsSending());
+ // false->false
+ EXPECT_TRUE(channel_->SetSend(false));
+ EXPECT_FALSE(stream->IsSending());
+
+ EXPECT_TRUE(channel_->SetSend(true));
+ FakeVideoSendStream* new_stream = AddSendStream();
+ EXPECT_TRUE(new_stream->IsSending())
+ << "Send stream created after SetSend(true) not sending initially.";
+}
+
+// This test verifies DSCP settings are properly applied on video media channel.
+// The channel is recreated for each MediaConfig::enable_dscp value, and the
+// resulting DSCP marking is observed on a FakeNetworkInterface.
+TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
+ std::unique_ptr<cricket::FakeNetworkInterface> network_interface(
+ new cricket::FakeNetworkInterface);
+ MediaConfig config;
+ std::unique_ptr<VideoMediaChannel> channel;
+
+ channel.reset(engine_.CreateChannel(call_.get(), config, VideoOptions()));
+ channel->SetInterface(network_interface.get());
+ // Default value when DSCP is disabled should be DSCP_DEFAULT.
+ EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
+
+ config.enable_dscp = true;
+ channel.reset(engine_.CreateChannel(call_.get(), config, VideoOptions()));
+ channel->SetInterface(network_interface.get());
+ EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp());
+
+ // Verify that setting the option to false resets the
+ // DiffServCodePoint.
+ config.enable_dscp = false;
+ channel.reset(engine_.CreateChannel(call_.get(), config, VideoOptions()));
+ channel->SetInterface(network_interface.get());
+ EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
+}
+
+// This test verifies that the RTCP reduced size mode is properly applied to
+// send video streams, for both existing streams and streams created after the
+// mode is enabled.
+TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) {
+ // Create stream, expecting that default mode is "compound".
+ FakeVideoSendStream* stream1 = AddSendStream();
+ EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Now enable reduced size mode.
+ send_parameters_.rtcp.reduced_size = true;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ stream1 = fake_call_->GetVideoSendStreams()[0];
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Create a new stream and ensure it picks up the reduced size mode.
+ FakeVideoSendStream* stream2 = AddSendStream();
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode);
+}
+
+// This test verifies that the RTCP reduced size mode is properly applied to
+// receive video streams. Note the flag is taken from the *send* parameters.
+TEST_F(WebRtcVideoChannelTest, TestSetRecvRtcpReducedSize) {
+ // Create stream, expecting that default mode is "compound".
+ FakeVideoReceiveStream* stream1 = AddRecvStream();
+ EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Now enable reduced size mode.
+ // TODO(deadbeef): Once "recv_parameters" becomes "receiver_parameters",
+ // the reduced_size flag should come from that.
+ send_parameters_.rtcp.reduced_size = true;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ stream1 = fake_call_->GetVideoReceiveStreams()[0];
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode);
+
+ // Create a new stream and ensure it picks up the reduced size mode.
+ FakeVideoReceiveStream* stream2 = AddRecvStream();
+ EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode);
+}
+
+// Checks that OnReadyToSend(false/true) toggles the call's VIDEO network state
+// (up -> down -> up) while leaving the AUDIO network state untouched.
+TEST_F(WebRtcVideoChannelTest, OnReadyToSendSignalsNetworkState) {
+ EXPECT_EQ(webrtc::kNetworkUp,
+ fake_call_->GetNetworkState(webrtc::MediaType::VIDEO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ fake_call_->GetNetworkState(webrtc::MediaType::AUDIO));
+
+ channel_->OnReadyToSend(false);
+ EXPECT_EQ(webrtc::kNetworkDown,
+ fake_call_->GetNetworkState(webrtc::MediaType::VIDEO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ fake_call_->GetNetworkState(webrtc::MediaType::AUDIO));
+
+ channel_->OnReadyToSend(true);
+ EXPECT_EQ(webrtc::kNetworkUp,
+ fake_call_->GetNetworkState(webrtc::MediaType::VIDEO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ fake_call_->GetNetworkState(webrtc::MediaType::AUDIO));
+}
+
+// The following GetStats tests seed a fake stream with known Stats values and
+// verify the field-by-field translation into cricket::VideoMediaInfo.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsSentCodecName) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(GetEngineCodec("VP8"));
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ AddSendStream();
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ("VP8", info.senders[0].codec_name);
+}
+
+// encoder_implementation_name is passed through unchanged.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsEncoderImplementationName) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.encoder_implementation_name = "encoder_implementation_name";
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.encoder_implementation_name,
+ info.senders[0].encoder_implementation_name);
+}
+
+// avg_encode_time_ms and encode_usage_percent map to avg_encode_ms and
+// encode_usage_percent on the sender info.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuOveruseMetrics) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.avg_encode_time_ms = 13;
+ stats.encode_usage_percent = 42;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.avg_encode_time_ms, info.senders[0].avg_encode_ms);
+ EXPECT_EQ(stats.encode_usage_percent, info.senders[0].encode_usage_percent);
+}
+
+// frames_encoded is passed through unchanged.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsFramesEncoded) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.frames_encoded = 13;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.frames_encoded, info.senders[0].frames_encoded);
+}
+
+// qp_sum is passed through unchanged.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsQpSum) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.qp_sum = 13;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.qp_sum, info.senders[0].qp_sum);
+}
+
+// The reported send frame size is the maximum width and the maximum height
+// across substreams, taken *independently* (123 and 90 come from different
+// substreams below).
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.substreams[17].width = 123;
+ stats.substreams[17].height = 40;
+ stats.substreams[42].width = 80;
+ stats.substreams[42].height = 31;
+ stats.substreams[11].width = 20;
+ stats.substreams[11].height = 90;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1u, info.senders.size());
+ EXPECT_EQ(123, info.senders[0].send_frame_width);
+ EXPECT_EQ(90, info.senders[0].send_frame_height);
+}
+
+// preferred_media_bitrate_bps maps to preferred_bitrate.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsPreferredBitrate) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.preferred_media_bitrate_bps = 5;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1u, info.senders.size());
+ EXPECT_EQ(5, info.senders[0].preferred_bitrate);
+}
+
+// cpu_limited_resolution maps to the ADAPTREASON_CPU bit of adapt_reason, and
+// number_of_cpu_adapt_changes to adapt_changes.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuAdaptationStats) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.number_of_cpu_adapt_changes = 2;
+ stats.cpu_limited_resolution = true;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_CPU, info.senders[0].adapt_reason);
+ EXPECT_EQ(stats.number_of_cpu_adapt_changes, info.senders[0].adapt_changes);
+}
+
+// When both cpu_limited_resolution and bw_limited_resolution are set, both
+// ADAPTREASON bits must be present in adapt_reason.
+TEST_F(WebRtcVideoChannelTest, GetStatsReportsAdaptationAndBandwidthStats) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.number_of_cpu_adapt_changes = 2;
+ stats.cpu_limited_resolution = true;
+ stats.bw_limited_resolution = true;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_CPU |
+ WebRtcVideoChannel::ADAPTREASON_BANDWIDTH,
+ info.senders[0].adapt_reason);
+ EXPECT_EQ(stats.number_of_cpu_adapt_changes, info.senders[0].adapt_changes);
+}
+
+// bw_limited_resolution alone yields only the ADAPTREASON_BANDWIDTH bit.
+TEST_F(WebRtcVideoChannelTest,
+ GetStatsTranslatesBandwidthLimitedResolutionCorrectly) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.bw_limited_resolution = true;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_BANDWIDTH,
+ info.senders[0].adapt_reason);
+}
+
+// Sender-side RTCP feedback counters (FIR/NACK/PLI received) are summed over
+// all substreams: 2+5=7 FIRs, 3+7=10 NACKs, 4+9=13 PLIs.
+TEST_F(WebRtcVideoChannelTest,
+ GetStatsTranslatesSendRtcpPacketTypesCorrectly) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.substreams[17].rtcp_packet_type_counts.fir_packets = 2;
+ stats.substreams[17].rtcp_packet_type_counts.nack_packets = 3;
+ stats.substreams[17].rtcp_packet_type_counts.pli_packets = 4;
+
+ stats.substreams[42].rtcp_packet_type_counts.fir_packets = 5;
+ stats.substreams[42].rtcp_packet_type_counts.nack_packets = 7;
+ stats.substreams[42].rtcp_packet_type_counts.pli_packets = 9;
+
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(7, info.senders[0].firs_rcvd);
+ EXPECT_EQ(10, info.senders[0].nacks_rcvd);
+ EXPECT_EQ(13, info.senders[0].plis_rcvd);
+}
+
+// Receiver-side RTCP feedback counters map to the *_sent fields.
+TEST_F(WebRtcVideoChannelTest,
+ GetStatsTranslatesReceiveRtcpPacketTypesCorrectly) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ webrtc::VideoReceiveStream::Stats stats;
+ stats.rtcp_packet_type_counts.fir_packets = 2;
+ stats.rtcp_packet_type_counts.nack_packets = 3;
+ stats.rtcp_packet_type_counts.pli_packets = 4;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.rtcp_packet_type_counts.fir_packets,
+ info.receivers[0].firs_sent);
+ EXPECT_EQ(stats.rtcp_packet_type_counts.nack_packets,
+ info.receivers[0].nacks_sent);
+ EXPECT_EQ(stats.rtcp_packet_type_counts.pli_packets,
+ info.receivers[0].plis_sent);
+}
+
+// Field-by-field translation of receiver decode stats; frames_received is the
+// sum of key and delta frame counts.
+TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ webrtc::VideoReceiveStream::Stats stats;
+ stats.decoder_implementation_name = "decoder_implementation_name";
+ stats.decode_ms = 2;
+ stats.max_decode_ms = 3;
+ stats.current_delay_ms = 4;
+ stats.target_delay_ms = 5;
+ stats.jitter_buffer_ms = 6;
+ stats.min_playout_delay_ms = 7;
+ stats.render_delay_ms = 8;
+ stats.width = 9;
+ stats.height = 10;
+ stats.frame_counts.key_frames = 11;
+ stats.frame_counts.delta_frames = 12;
+ stats.frames_rendered = 13;
+ stats.frames_decoded = 14;
+ stats.qp_sum = 15;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.decoder_implementation_name,
+ info.receivers[0].decoder_implementation_name);
+ EXPECT_EQ(stats.decode_ms, info.receivers[0].decode_ms);
+ EXPECT_EQ(stats.max_decode_ms, info.receivers[0].max_decode_ms);
+ EXPECT_EQ(stats.current_delay_ms, info.receivers[0].current_delay_ms);
+ EXPECT_EQ(stats.target_delay_ms, info.receivers[0].target_delay_ms);
+ EXPECT_EQ(stats.jitter_buffer_ms, info.receivers[0].jitter_buffer_ms);
+ EXPECT_EQ(stats.min_playout_delay_ms, info.receivers[0].min_playout_delay_ms);
+ EXPECT_EQ(stats.render_delay_ms, info.receivers[0].render_delay_ms);
+ EXPECT_EQ(stats.width, info.receivers[0].frame_width);
+ EXPECT_EQ(stats.height, info.receivers[0].frame_height);
+ EXPECT_EQ(stats.frame_counts.key_frames + stats.frame_counts.delta_frames,
+ info.receivers[0].frames_received);
+ EXPECT_EQ(stats.frames_rendered, info.receivers[0].frames_rendered);
+ EXPECT_EQ(stats.frames_decoded, info.receivers[0].frames_decoded);
+ EXPECT_EQ(stats.qp_sum, info.receivers[0].qp_sum);
+}
+
+// bytes_rcvd is payload + header + padding bytes; fraction_lost is the 8-bit
+// RTCP fixed-point value scaled by 1/256 into a float.
+TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceivePacketStatsCorrectly) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ webrtc::VideoReceiveStream::Stats stats;
+ stats.rtp_stats.transmitted.payload_bytes = 2;
+ stats.rtp_stats.transmitted.header_bytes = 3;
+ stats.rtp_stats.transmitted.padding_bytes = 4;
+ stats.rtp_stats.transmitted.packets = 5;
+ stats.rtcp_stats.packets_lost = 6;
+ stats.rtcp_stats.fraction_lost = 7;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ EXPECT_EQ(stats.rtp_stats.transmitted.payload_bytes +
+ stats.rtp_stats.transmitted.header_bytes +
+ stats.rtp_stats.transmitted.padding_bytes,
+ info.receivers[0].bytes_rcvd);
+ EXPECT_EQ(stats.rtp_stats.transmitted.packets,
+ info.receivers[0].packets_rcvd);
+ EXPECT_EQ(stats.rtcp_stats.packets_lost, info.receivers[0].packets_lost);
+ EXPECT_EQ(static_cast<float>(stats.rtcp_stats.fraction_lost) / (1 << 8),
+ info.receivers[0].fraction_lost);
+}
+
+// The call-level RTT is copied into every sender's rtt_ms.
+TEST_F(WebRtcVideoChannelTest, TranslatesCallStatsCorrectly) {
+ AddSendStream();
+ AddSendStream();
+ webrtc::Call::Stats stats;
+ stats.rtt_ms = 123;
+ fake_call_->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(2u, info.senders.size());
+ EXPECT_EQ(stats.rtt_ms, info.senders[0].rtt_ms);
+ EXPECT_EQ(stats.rtt_ms, info.senders[1].rtt_ms);
+}
+
// Verifies that per-stream media bitrates are translated into the senders'
// nominal bitrates, and that BandwidthEstimationInfo aggregates target,
// actual, transmit and retransmit bitrates across all streams/substreams.
TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) {
  FakeVideoSendStream* stream = AddSendStream();
  webrtc::VideoSendStream::Stats stats;
  stats.target_media_bitrate_bps = 156;
  stats.media_bitrate_bps = 123;
  // Substream keys (17, 42, ...) are arbitrary SSRC-like ids; only the
  // summed bitrate values matter for the checks below.
  stats.substreams[17].total_bitrate_bps = 1;
  stats.substreams[17].retransmit_bitrate_bps = 2;
  stats.substreams[42].total_bitrate_bps = 3;
  stats.substreams[42].retransmit_bitrate_bps = 4;
  stream->SetStats(stats);

  FakeVideoSendStream* stream2 = AddSendStream();
  webrtc::VideoSendStream::Stats stats2;
  stats2.target_media_bitrate_bps = 200;
  stats2.media_bitrate_bps = 321;
  stats2.substreams[13].total_bitrate_bps = 5;
  stats2.substreams[13].retransmit_bitrate_bps = 6;
  stats2.substreams[21].total_bitrate_bps = 7;
  stats2.substreams[21].retransmit_bitrate_bps = 8;
  stream2->SetStats(stats2);

  cricket::VideoMediaInfo info;
  ASSERT_TRUE(channel_->GetStats(&info));
  ASSERT_EQ(2u, info.senders.size());
  BandwidthEstimationInfo bwe_info;
  channel_->FillBitrateInfo(&bwe_info);
  // Assuming stream and stream2 correspond to senders[0] and [1] respectively
  // is OK as std::maps are sorted and AddSendStream() gives increasing SSRCs.
  EXPECT_EQ(stats.media_bitrate_bps, info.senders[0].nominal_bitrate);
  EXPECT_EQ(stats2.media_bitrate_bps, info.senders[1].nominal_bitrate);
  EXPECT_EQ(stats.target_media_bitrate_bps + stats2.target_media_bitrate_bps,
            bwe_info.target_enc_bitrate);
  EXPECT_EQ(stats.media_bitrate_bps + stats2.media_bitrate_bps,
            bwe_info.actual_enc_bitrate);
  EXPECT_EQ(1 + 3 + 5 + 7, bwe_info.transmit_bitrate)
      << "Bandwidth stats should take all streams into account.";
  EXPECT_EQ(2 + 4 + 6 + 8, bwe_info.retransmit_bitrate)
      << "Bandwidth stats should take all streams into account.";
}
+
// Verifies that a default receive stream, created on the fly for an
// unsignaled SSRC (and therefore without RTX), is reconfigured in place —
// not replaced — to use RTX once the stream is properly signaled.
TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) {
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1);
  const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);

  ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size());
  // Craft a minimal 12-byte RTP header carrying the media SSRC at offset 8.
  const size_t kDataLength = 12;
  uint8_t data[kDataLength];
  memset(data, 0, sizeof(data));
  rtc::SetBE32(&data[8], ssrcs[0]);
  rtc::CopyOnWriteBuffer packet(data, kDataLength);
  rtc::PacketTime packet_time;
  channel_->OnPacketReceived(&packet, packet_time);

  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
      << "No default receive stream created.";
  FakeVideoReceiveStream* recv_stream = fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(0u, recv_stream->GetConfig().rtp.rtx_ssrc)
      << "Default receive stream should not have configured RTX";

  // Signal the same SSRC together with its RTX SSRC.
  EXPECT_TRUE(channel_->AddRecvStream(
      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)));
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
      << "AddRecvStream should have reconfigured, not added a new receiver.";
  recv_stream = fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_FALSE(
      recv_stream->GetConfig().rtp.rtx_associated_payload_types.empty());
  EXPECT_TRUE(VerifyRtxReceiveAssociations(recv_stream->GetConfig()))
      << "RTX should be mapped for all decoders/payload types.";
  EXPECT_TRUE(HasRtxReceiveAssociation(recv_stream->GetConfig(),
                                       GetEngineCodec("red").id))
      << "RTX should be mapped also for the RED payload type";
  EXPECT_EQ(rtx_ssrcs[0], recv_stream->GetConfig().rtp.rtx_ssrc);
}
+
// Verifies that StreamParams advertising RTX ssrc-groups but missing the RTX
// SSRCs from the |ssrcs| list are rejected for both send and receive.
TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) {
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1);
  const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);

  StreamParams sp =
      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs);
  sp.ssrcs = ssrcs;  // Without RTXs, this is the important part.

  EXPECT_FALSE(channel_->AddSendStream(sp));
  EXPECT_FALSE(channel_->AddRecvStream(sp));
}
+
// Verifies that an SSRC already in use as an RTX SSRC cannot be added as a
// new stream, and that removing the owning stream releases the RTX SSRC.
TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) {
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1);
  const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);

  StreamParams sp =
      cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs);

  EXPECT_TRUE(channel_->AddSendStream(sp));
  EXPECT_TRUE(channel_->AddRecvStream(sp));

  // The RTX SSRC is already used in previous streams, using it should fail.
  sp = cricket::StreamParams::CreateLegacy(rtx_ssrcs[0]);
  EXPECT_FALSE(channel_->AddSendStream(sp));
  EXPECT_FALSE(channel_->AddRecvStream(sp));

  // After removing the original stream this should be fine to add (makes sure
  // that RTX ssrcs are not forever taken).
  EXPECT_TRUE(channel_->RemoveSendStream(ssrcs[0]));
  EXPECT_TRUE(channel_->RemoveRecvStream(ssrcs[0]));
  EXPECT_TRUE(channel_->AddSendStream(sp));
  EXPECT_TRUE(channel_->AddRecvStream(sp));
}
+
// Verifies that a simulcast stream reusing any SSRC from an existing stream
// is rejected, and that removal of the original frees its SSRCs again.
TEST_F(WebRtcVideoChannelTest,
       RejectsAddingStreamsWithOverlappingSimulcastSsrcs) {
  static const uint32_t kFirstStreamSsrcs[] = {1, 2, 3};
  // SSRC 3 overlaps with the first stream.
  static const uint32_t kOverlappingStreamSsrcs[] = {4, 3, 5};
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  StreamParams sp =
      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs));

  EXPECT_TRUE(channel_->AddSendStream(sp));
  EXPECT_TRUE(channel_->AddRecvStream(sp));

  // One of the SSRCs is already used in previous streams, using it should fail.
  sp = cricket::CreateSimStreamParams("cname",
                                      MAKE_VECTOR(kOverlappingStreamSsrcs));
  EXPECT_FALSE(channel_->AddSendStream(sp));
  EXPECT_FALSE(channel_->AddRecvStream(sp));

  // After removing the original stream this should be fine to add (makes sure
  // that simulcast SSRCs are not forever taken).
  EXPECT_TRUE(channel_->RemoveSendStream(kFirstStreamSsrcs[0]));
  EXPECT_TRUE(channel_->RemoveRecvStream(kFirstStreamSsrcs[0]));
  EXPECT_TRUE(channel_->AddSendStream(sp));
  EXPECT_TRUE(channel_->AddRecvStream(sp));
}
+
// Verifies that the ssrc_groups configured via StreamParams are reported
// back unchanged in the sender and receiver stats.
TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) {
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  static const uint32_t kSenderSsrcs[] = {4, 7, 10};
  static const uint32_t kSenderRtxSsrcs[] = {5, 8, 11};

  StreamParams sender_sp = cricket::CreateSimWithRtxStreamParams(
      "cname", MAKE_VECTOR(kSenderSsrcs), MAKE_VECTOR(kSenderRtxSsrcs));

  EXPECT_TRUE(channel_->AddSendStream(sender_sp));

  static const uint32_t kReceiverSsrcs[] = {3};
  static const uint32_t kReceiverRtxSsrcs[] = {2};

  StreamParams receiver_sp = cricket::CreateSimWithRtxStreamParams(
      "cname", MAKE_VECTOR(kReceiverSsrcs), MAKE_VECTOR(kReceiverRtxSsrcs));
  EXPECT_TRUE(channel_->AddRecvStream(receiver_sp));

  cricket::VideoMediaInfo info;
  ASSERT_TRUE(channel_->GetStats(&info));

  ASSERT_EQ(1u, info.senders.size());
  ASSERT_EQ(1u, info.receivers.size());

  // Sanity: sender/receiver groups differ, so the two checks below really do
  // distinguish the reported directions.
  EXPECT_NE(sender_sp.ssrc_groups, receiver_sp.ssrc_groups);
  EXPECT_EQ(sender_sp.ssrc_groups, info.senders[0].ssrc_groups);
  EXPECT_EQ(receiver_sp.ssrc_groups, info.receivers[0].ssrc_groups);
}
+
// Verifies that the receiver stats map the currently received payload type
// to a codec name, and report an empty name when nothing (or an unknown
// payload type) is being received.
TEST_F(WebRtcVideoChannelTest, MapsReceivedPayloadTypeToCodecName) {
  FakeVideoReceiveStream* stream = AddRecvStream();
  webrtc::VideoReceiveStream::Stats stats;
  cricket::VideoMediaInfo info;

  // Report no codec name before receiving.
  stream->SetStats(stats);
  ASSERT_TRUE(channel_->GetStats(&info));
  EXPECT_STREQ("", info.receivers[0].codec_name.c_str());

  // Report VP8 if we're receiving it.
  stats.current_payload_type = GetEngineCodec("VP8").id;
  stream->SetStats(stats);
  ASSERT_TRUE(channel_->GetStats(&info));
  EXPECT_STREQ(kVp8CodecName, info.receivers[0].codec_name.c_str());

  // Report no codec name for unknown payload types.
  stats.current_payload_type = 3;
  stream->SetStats(stats);
  ASSERT_TRUE(channel_->GetStats(&info));
  EXPECT_STREQ("", info.receivers[0].codec_name.c_str());
}
+
// Shared helper: injects a minimal RTP packet with |payload_type| on an
// unsignaled SSRC and checks whether a default receive stream is (or is not)
// created, per |expect_created_receive_stream|. Media payload types should
// create one; RTX/FEC/RED-RTX payload types should not.
void WebRtcVideoChannelTest::TestReceiveUnsignaledSsrcPacket(
    uint8_t payload_type,
    bool expect_created_receive_stream) {
  // kRedRtxPayloadType must currently be unused.
  EXPECT_FALSE(FindCodecById(engine_.codecs(), kRedRtxPayloadType));

  // Add a RED RTX codec.
  VideoCodec red_rtx_codec =
      VideoCodec::CreateRtxCodec(kRedRtxPayloadType, GetEngineCodec("red").id);
  recv_parameters_.codecs.push_back(red_rtx_codec);
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));

  ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size());
  // Minimal 12-byte RTP header: payload type in byte 1, SSRC at offset 8.
  const size_t kDataLength = 12;
  uint8_t data[kDataLength];
  memset(data, 0, sizeof(data));

  rtc::Set8(data, 1, payload_type);
  rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
  rtc::CopyOnWriteBuffer packet(data, kDataLength);
  rtc::PacketTime packet_time;
  channel_->OnPacketReceived(&packet, packet_time);

  if (expect_created_receive_stream) {
    EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
        << "Should have created a receive stream for payload type: "
        << payload_type;
  } else {
    EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size())
        << "Shouldn't have created a receive stream for payload type: "
        << payload_type;
  }
}
+
// A VP8 media packet on an unknown SSRC should create a default stream.
TEST_F(WebRtcVideoChannelTest, Vp8PacketCreatesUnsignalledStream) {
  TestReceiveUnsignaledSsrcPacket(GetEngineCodec("VP8").id,
                                  true /* expect_created_receive_stream */);
}
+
// A VP9 media packet on an unknown SSRC should create a default stream.
TEST_F(WebRtcVideoChannelTest, Vp9PacketCreatesUnsignalledStream) {
  TestReceiveUnsignaledSsrcPacket(GetEngineCodec("VP9").id,
                                  true /* expect_created_receive_stream */);
}
+
// An RTX packet (here VP8's associated RTX payload type) on an unknown SSRC
// must not create a default stream.
TEST_F(WebRtcVideoChannelTest, RtxPacketDoesntCreateUnsignalledStream) {
  const cricket::VideoCodec vp8 = GetEngineCodec("VP8");
  const int rtx_vp8_payload_type = default_apt_rtx_types_[vp8.id];
  TestReceiveUnsignaledSsrcPacket(rtx_vp8_payload_type,
                                  false /* expect_created_receive_stream */);
}
+
// A ULPFEC packet on an unknown SSRC must not create a default stream.
TEST_F(WebRtcVideoChannelTest, UlpfecPacketDoesntCreateUnsignalledStream) {
  TestReceiveUnsignaledSsrcPacket(GetEngineCodec("ulpfec").id,
                                  false /* expect_created_receive_stream */);
}
+
// A FlexFEC packet on an unknown SSRC must not create a default stream
// (uses the FlexFEC-enabled fixture so the codec is present).
TEST_F(WebRtcVideoChannelFlexfecRecvTest,
       FlexfecPacketDoesntCreateUnsignalledStream) {
  TestReceiveUnsignaledSsrcPacket(GetEngineCodec("flexfec-03").id,
                                  false /* expect_created_receive_stream */);
}
+
// A RED RTX packet on an unknown SSRC must not create a default stream.
TEST_F(WebRtcVideoChannelTest, RedRtxPacketDoesntCreateUnsignalledStream) {
  TestReceiveUnsignaledSsrcPacket(kRedRtxPayloadType,
                                  false /* expect_created_receive_stream */);
}
+
// Test that receiving any unsignalled SSRC works even if it changes.
// The first unsignalled SSRC received will create a default receive stream.
// Any different unsignalled SSRC received will replace the default.
TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) {
  // Allow receiving VP8, VP9, H264 (if enabled).
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));

#if defined(WEBRTC_USE_H264)
  cricket::VideoCodec H264codec(126, "H264");
  parameters.codecs.push_back(H264codec);
#endif

  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
  // No receive streams yet.
  ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size());
  cricket::FakeVideoRenderer renderer;
  // kDefaultRecvSsrc routes the sink to whatever the default stream is.
  EXPECT_TRUE(channel_->SetSink(kDefaultRecvSsrc, &renderer));

  // Receive VP8 packet on first SSRC.
  uint8_t data[kMinRtpPacketLen];
  cricket::RtpHeader rtpHeader;
  rtpHeader.payload_type = GetEngineCodec("VP8").id;
  rtpHeader.seq_num = rtpHeader.timestamp = 0;
  rtpHeader.ssrc = kIncomingUnsignalledSsrc+1;
  cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
  rtc::CopyOnWriteBuffer packet(data, sizeof(data));
  rtc::PacketTime packet_time;
  channel_->OnPacketReceived(&packet, packet_time);
  // VP8 packet should create default receive stream.
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
  FakeVideoReceiveStream* recv_stream =
      fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
  // Verify that the receive stream sinks to a renderer.
  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4), 100, 0,
                                 webrtc::kVideoRotation_0);
  recv_stream->InjectFrame(video_frame);
  EXPECT_EQ(1, renderer.num_rendered_frames());

  // Receive VP9 packet on second SSRC.
  rtpHeader.payload_type = GetEngineCodec("VP9").id;
  rtpHeader.ssrc = kIncomingUnsignalledSsrc+2;
  cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
  rtc::CopyOnWriteBuffer packet2(data, sizeof(data));
  channel_->OnPacketReceived(&packet2, packet_time);
  // VP9 packet should replace the default receive SSRC.
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
  recv_stream = fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
  // Verify that the receive stream sinks to a renderer.
  webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
                                  webrtc::kVideoRotation_0);
  recv_stream->InjectFrame(video_frame2);
  EXPECT_EQ(2, renderer.num_rendered_frames());

#if defined(WEBRTC_USE_H264)
  // Receive H264 packet on third SSRC.
  rtpHeader.payload_type = 126;
  rtpHeader.ssrc = kIncomingUnsignalledSsrc+3;
  cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
  rtc::CopyOnWriteBuffer packet3(data, sizeof(data));
  channel_->OnPacketReceived(&packet3, packet_time);
  // H264 packet should replace the default receive SSRC.
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
  recv_stream = fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
  // Verify that the receive stream sinks to a renderer.
  webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
                                  webrtc::kVideoRotation_0);
  recv_stream->InjectFrame(video_frame3);
  EXPECT_EQ(3, renderer.num_rendered_frames());
#endif
}
+
// This test verifies that when a new default stream is created for a new
// unsignaled SSRC, the new stream does not overwrite any old stream that had
// been the default receive stream before being properly signaled.
TEST_F(WebRtcVideoChannelTest,
       NewUnsignaledStreamDoesNotDestroyPreviouslyUnsignaledStream) {
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  ASSERT_TRUE(channel_->SetRecvParameters(parameters));

  // No streams signaled and no packets received, so we should not have any
  // stream objects created yet.
  EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size());

  // Receive packet on an unsignaled SSRC.
  uint8_t data[kMinRtpPacketLen];
  cricket::RtpHeader rtp_header;
  rtp_header.payload_type = GetEngineCodec("VP8").id;
  rtp_header.seq_num = rtp_header.timestamp = 0;
  rtp_header.ssrc = kSsrcs3[0];
  cricket::SetRtpHeader(data, sizeof(data), rtp_header);
  rtc::CopyOnWriteBuffer packet(data, sizeof(data));
  rtc::PacketTime packet_time;
  channel_->OnPacketReceived(&packet, packet_time);
  // Default receive stream should be created.
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
  FakeVideoReceiveStream* recv_stream0 =
      fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc);

  // Signal the SSRC; the previously-default stream is now a signaled one.
  EXPECT_TRUE(
      channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs3[0])));
  ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
  recv_stream0 = fake_call_->GetVideoReceiveStreams()[0];
  EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc);

  // Receive packet on a different unsignaled SSRC.
  rtp_header.ssrc = kSsrcs3[1];
  cricket::SetRtpHeader(data, sizeof(data), rtp_header);
  packet.SetData(data, sizeof(data));
  channel_->OnPacketReceived(&packet, packet_time);
  // New default receive stream should be created, but old stream should remain.
  ASSERT_EQ(2u, fake_call_->GetVideoReceiveStreams().size());
  EXPECT_EQ(recv_stream0, fake_call_->GetVideoReceiveStreams()[0]);
  FakeVideoReceiveStream* recv_stream1 =
      fake_call_->GetVideoReceiveStreams()[1];
  EXPECT_EQ(kSsrcs3[1], recv_stream1->GetConfig().rtp.remote_ssrc);
}
+
// Verifies that the max bitrate of an existing send stream can be set and
// cleared via SetRtpSendParameters / send parameters, and that the effective
// cap is the minimum of the global and per-encoding limits.
TEST_F(WebRtcVideoChannelTest, CanSentMaxBitrateForExistingStream) {
  AddSendStream();

  cricket::FakeVideoCapturer capturer;
  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
  cricket::VideoFormat capture_format_hd =
      capturer.GetSupportedFormats()->front();
  EXPECT_EQ(1280, capture_format_hd.width);
  EXPECT_EQ(720, capture_format_hd.height);
  EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format_hd));
  EXPECT_TRUE(channel_->SetSend(true));
  capturer.CaptureFrame();

  int default_encoder_bitrate = GetMaxEncoderBitrate();
  EXPECT_GT(default_encoder_bitrate, 1000);

  // TODO(skvlad): Resolve the inconsistency between the interpretation
  // of the global bitrate limit for audio and video:
  // - Audio: max_bandwidth_bps = 0 - fail the operation,
  //          max_bandwidth_bps = -1 - remove the bandwidth limit
  // - Video: max_bandwidth_bps = 0 - remove the bandwidth limit,
  //          max_bandwidth_bps = -1 - remove the bandwidth limit

  // Args: (global limit, per-encoding limit, expected resulting max bitrate).
  SetAndExpectMaxBitrate(1000, 0, 1000);
  SetAndExpectMaxBitrate(1000, 800, 800);
  SetAndExpectMaxBitrate(600, 800, 600);
  SetAndExpectMaxBitrate(0, 800, 800);
  SetAndExpectMaxBitrate(0, 0, default_encoder_bitrate);

  // Detach the capturer before it goes out of scope.
  EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}
+
+TEST_F(WebRtcVideoChannelTest, CannotSetMaxBitrateForNonexistentStream) {
+ webrtc::RtpParameters nonexistent_parameters =
+ channel_->GetRtpSendParameters(last_ssrc_);
+ EXPECT_EQ(0, nonexistent_parameters.encodings.size());
+
+ nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+ EXPECT_FALSE(
+ channel_->SetRtpSendParameters(last_ssrc_, nonexistent_parameters));
+}
+
TEST_F(WebRtcVideoChannelTest,
       CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
  // This test verifies that setting RtpParameters succeeds only if
  // the structure contains exactly one encoding.
  // TODO(skvlad): Update this test when we start supporting setting parameters
  // for each encoding individually.

  AddSendStream();
  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
  // Two or more encodings should result in failure.
  parameters.encodings.push_back(webrtc::RtpEncodingParameters());
  EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
  // Zero encodings should also fail.
  parameters.encodings.clear();
  EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
}
+
// Changing the SSRC through RtpParameters is not allowed; SetRtpSendParameters
// must reject an encoding whose ssrc differs from the stream's.
TEST_F(WebRtcVideoChannelTest, CannotSetSsrcInRtpSendParameters) {
  AddSendStream();
  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
  parameters.encodings[0].ssrc = 0xdeadbeef;
  EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
}
+
// Test that a stream will not be sending if its encoding is made inactive
// through SetRtpSendParameters.
// TODO(deadbeef): Update this test when we start supporting setting parameters
// for each encoding individually.
TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersEncodingsActive) {
  FakeVideoSendStream* stream = AddSendStream();
  EXPECT_TRUE(channel_->SetSend(true));
  EXPECT_TRUE(stream->IsSending());

  // Get current parameters and change "active" to false.
  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
  ASSERT_EQ(1u, parameters.encodings.size());
  ASSERT_TRUE(parameters.encodings[0].active);
  parameters.encodings[0].active = false;
  EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
  EXPECT_FALSE(stream->IsSending());

  // Now change it back to active and verify we resume sending.
  parameters.encodings[0].active = true;
  EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
  EXPECT_TRUE(stream->IsSending());
}
+
// Test that if a stream is reconfigured (due to a codec change or other
// change) while its encoding is still inactive, it doesn't start sending.
TEST_F(WebRtcVideoChannelTest,
       InactiveStreamDoesntStartSendingWhenReconfigured) {
  // Set an initial codec list, which will be modified later.
  cricket::VideoSendParameters parameters1;
  parameters1.codecs.push_back(GetEngineCodec("VP8"));
  parameters1.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetSendParameters(parameters1));

  FakeVideoSendStream* stream = AddSendStream();
  EXPECT_TRUE(channel_->SetSend(true));
  EXPECT_TRUE(stream->IsSending());

  // Get current parameters and change "active" to false.
  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
  ASSERT_EQ(1u, parameters.encodings.size());
  ASSERT_TRUE(parameters.encodings[0].active);
  parameters.encodings[0].active = false;
  EXPECT_EQ(1u, GetFakeSendStreams().size());
  EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
  EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
  EXPECT_FALSE(stream->IsSending());

  // Reorder the codec list, causing the stream to be reconfigured.
  cricket::VideoSendParameters parameters2;
  parameters2.codecs.push_back(GetEngineCodec("VP9"));
  parameters2.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(channel_->SetSendParameters(parameters2));
  auto new_streams = GetFakeSendStreams();
  // Assert that a new underlying stream was created due to the codec change.
  // Otherwise, this test isn't testing what it set out to test.
  EXPECT_EQ(1u, GetFakeSendStreams().size());
  EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams());

  // Verify that we still are not sending anything, due to the inactive
  // encoding.
  EXPECT_FALSE(new_streams[0]->IsSending());
}
+
// Test that GetRtpSendParameters returns the currently configured codecs, in
// the same order they were set.
TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) {
  AddSendStream();
  cricket::VideoSendParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetSendParameters(parameters));

  webrtc::RtpParameters rtp_parameters =
      channel_->GetRtpSendParameters(last_ssrc_);
  ASSERT_EQ(2u, rtp_parameters.codecs.size());
  EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(),
            rtp_parameters.codecs[0]);
  EXPECT_EQ(GetEngineCodec("VP9").ToCodecParameters(),
            rtp_parameters.codecs[1]);
}
+
// Test that RtpParameters for send stream has one encoding and it has
// the correct SSRC.
TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersSsrc) {
  AddSendStream();

  webrtc::RtpParameters rtp_parameters =
      channel_->GetRtpSendParameters(last_ssrc_);
  ASSERT_EQ(1u, rtp_parameters.encodings.size());
  EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc);
}
+
// Test that if we set/get parameters multiple times, we get the same results:
// a get/set round trip must be idempotent.
TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) {
  AddSendStream();
  cricket::VideoSendParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetSendParameters(parameters));

  webrtc::RtpParameters initial_params =
      channel_->GetRtpSendParameters(last_ssrc_);

  // We should be able to set the params we just got.
  EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, initial_params));

  // ... And this shouldn't change the params returned by GetRtpSendParameters.
  EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(last_ssrc_));
}
+
// Test that GetRtpReceiveParameters returns the currently configured codecs,
// in the same order they were set.
TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersCodecs) {
  AddRecvStream();
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetRecvParameters(parameters));

  webrtc::RtpParameters rtp_parameters =
      channel_->GetRtpReceiveParameters(last_ssrc_);
  ASSERT_EQ(2u, rtp_parameters.codecs.size());
  EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(),
            rtp_parameters.codecs[0]);
  EXPECT_EQ(GetEngineCodec("VP9").ToCodecParameters(),
            rtp_parameters.codecs[1]);
}
+
// Verifies that the H264 sprop-parameter-sets fmtp value set on receive
// codecs is propagated into each decoder's codec_params in the receive
// stream config. The test only runs when H264 support is compiled in.
#if defined(WEBRTC_USE_H264)
TEST_F(WebRtcVideoChannelTest, GetRtpReceiveFmtpSprop) {
#else
TEST_F(WebRtcVideoChannelTest, DISABLED_GetRtpReceiveFmtpSprop) {
#endif
  cricket::VideoRecvParameters parameters;
  cricket::VideoCodec kH264sprop1(101, "H264");
  kH264sprop1.SetParam(kH264FmtpSpropParameterSets, "uvw");
  parameters.codecs.push_back(kH264sprop1);
  cricket::VideoCodec kH264sprop2(102, "H264");
  kH264sprop2.SetParam(kH264FmtpSpropParameterSets, "xyz");
  parameters.codecs.push_back(kH264sprop2);
  EXPECT_TRUE(channel_->SetRecvParameters(parameters));

  FakeVideoReceiveStream* recv_stream = AddRecvStream();
  const webrtc::VideoReceiveStream::Config& cfg = recv_stream->GetConfig();
  webrtc::RtpParameters rtp_parameters =
      channel_->GetRtpReceiveParameters(last_ssrc_);
  ASSERT_EQ(2u, rtp_parameters.codecs.size());
  EXPECT_EQ(kH264sprop1.ToCodecParameters(), rtp_parameters.codecs[0]);
  ASSERT_EQ(2u, cfg.decoders.size());
  // First decoder carries the first codec's sprop value.
  EXPECT_EQ(101, cfg.decoders[0].payload_type);
  EXPECT_EQ("H264", cfg.decoders[0].payload_name);
  const auto it0 =
      cfg.decoders[0].codec_params.find(kH264FmtpSpropParameterSets);
  ASSERT_TRUE(it0 != cfg.decoders[0].codec_params.end());
  EXPECT_EQ("uvw", it0->second);

  // Second decoder carries the second codec's sprop value.
  EXPECT_EQ(102, cfg.decoders[1].payload_type);
  EXPECT_EQ("H264", cfg.decoders[1].payload_name);
  const auto it1 =
      cfg.decoders[1].codec_params.find(kH264FmtpSpropParameterSets);
  ASSERT_TRUE(it1 != cfg.decoders[1].codec_params.end());
  EXPECT_EQ("xyz", it1->second);
}
+
// Test that RtpParameters for receive stream has one encoding and it has
// the correct SSRC.
TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersSsrc) {
  AddRecvStream();

  webrtc::RtpParameters rtp_parameters =
      channel_->GetRtpReceiveParameters(last_ssrc_);
  ASSERT_EQ(1u, rtp_parameters.encodings.size());
  EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc);
}
+
// Test that if we set/get parameters multiple times, we get the same results:
// a get/set round trip must be idempotent.
TEST_F(WebRtcVideoChannelTest, SetAndGetRtpReceiveParameters) {
  AddRecvStream();
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetRecvParameters(parameters));

  webrtc::RtpParameters initial_params =
      channel_->GetRtpReceiveParameters(last_ssrc_);

  // We should be able to set the params we just got.
  EXPECT_TRUE(channel_->SetRtpReceiveParameters(last_ssrc_, initial_params));

  // ... And this shouldn't change the params returned by
  // GetRtpReceiveParameters.
  EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(last_ssrc_));
}
+
// Test that GetRtpReceiveParameters returns parameters correctly when SSRCs
// aren't signaled. It should always return an empty "RtpEncodingParameters",
// even after a packet is received and the unsignaled SSRC is known.
TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersWithUnsignaledSsrc) {
  // Call necessary methods to configure receiving a default stream as
  // soon as it arrives.
  cricket::VideoRecvParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  EXPECT_TRUE(channel_->SetRecvParameters(parameters));

  // Call GetRtpReceiveParameters before configured to receive an unsignaled
  // stream. Should return nothing.
  EXPECT_EQ(webrtc::RtpParameters(), channel_->GetRtpReceiveParameters(0));

  // Set a sink for an unsignaled stream.
  cricket::FakeVideoRenderer renderer;
  // Value of "0" means "unsignaled stream".
  EXPECT_TRUE(channel_->SetSink(0, &renderer));

  // Call GetRtpReceiveParameters before the SSRC is known. Value of "0"
  // in this method means "unsignaled stream".
  webrtc::RtpParameters rtp_parameters = channel_->GetRtpReceiveParameters(0);
  ASSERT_EQ(1u, rtp_parameters.encodings.size());
  EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);

  // Receive VP8 packet.
  uint8_t data[kMinRtpPacketLen];
  cricket::RtpHeader rtpHeader;
  rtpHeader.payload_type = GetEngineCodec("VP8").id;
  rtpHeader.seq_num = rtpHeader.timestamp = 0;
  rtpHeader.ssrc = kIncomingUnsignalledSsrc;
  cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
  rtc::CopyOnWriteBuffer packet(data, sizeof(data));
  rtc::PacketTime packet_time;
  channel_->OnPacketReceived(&packet, packet_time);

  // The |ssrc| member should still be unset.
  rtp_parameters = channel_->GetRtpReceiveParameters(0);
  ASSERT_EQ(1u, rtp_parameters.encodings.size());
  EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);
}
+
// Shared helper: verifies that receive streams get their local (sender-side
// feedback) SSRC from the currently configured send streams, falling back to
// the default (1) when no sender exists. |receiver_first| chooses whether the
// receive stream is added before or after the first send stream.
void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration(
    bool receiver_first) {
  EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));

  const uint32_t kSenderSsrc = 0xC0FFEE;
  const uint32_t kSecondSenderSsrc = 0xBADCAFE;
  const uint32_t kReceiverSsrc = 0x4711;
  const uint32_t kExpectedDefaultReceiverSsrc = 1;

  if (receiver_first) {
    AddRecvStream(StreamParams::CreateLegacy(kReceiverSsrc));
    std::vector<FakeVideoReceiveStream*> receive_streams =
        fake_call_->GetVideoReceiveStreams();
    ASSERT_EQ(1u, receive_streams.size());
    // Default local SSRC when we have no sender.
    EXPECT_EQ(kExpectedDefaultReceiverSsrc,
              receive_streams[0]->GetConfig().rtp.local_ssrc);
  }
  AddSendStream(StreamParams::CreateLegacy(kSenderSsrc));
  if (!receiver_first)
    AddRecvStream(StreamParams::CreateLegacy(kReceiverSsrc));
  std::vector<FakeVideoReceiveStream*> receive_streams =
      fake_call_->GetVideoReceiveStreams();
  ASSERT_EQ(1u, receive_streams.size());
  EXPECT_EQ(kSenderSsrc, receive_streams[0]->GetConfig().rtp.local_ssrc);

  // Removing first sender should fall back to another (in this case the second)
  // local send stream's SSRC.
  AddSendStream(StreamParams::CreateLegacy(kSecondSenderSsrc));
  ASSERT_TRUE(channel_->RemoveSendStream(kSenderSsrc));
  receive_streams =
      fake_call_->GetVideoReceiveStreams();
  ASSERT_EQ(1u, receive_streams.size());
  EXPECT_EQ(kSecondSenderSsrc, receive_streams[0]->GetConfig().rtp.local_ssrc);

  // Removing the last sender should fall back to default local SSRC.
  ASSERT_TRUE(channel_->RemoveSendStream(kSecondSenderSsrc));
  receive_streams =
      fake_call_->GetVideoReceiveStreams();
  ASSERT_EQ(1u, receive_streams.size());
  EXPECT_EQ(kExpectedDefaultReceiverSsrc,
            receive_streams[0]->GetConfig().rtp.local_ssrc);
}
+
// Sender added before the receiver: local SSRC configured at creation.
TEST_F(WebRtcVideoChannelTest, ConfiguresLocalSsrc) {
  TestReceiverLocalSsrcConfiguration(false);
}
+
// Receiver added first: local SSRC reconfigured once a sender appears.
TEST_F(WebRtcVideoChannelTest, ConfiguresLocalSsrcOnExistingReceivers) {
  TestReceiverLocalSsrcConfiguration(true);
}
+
+class WebRtcVideoChannelSimulcastTest : public testing::Test {
+ public:
+ WebRtcVideoChannelSimulcastTest()
+ : fake_call_(webrtc::Call::Config(&event_log_)),
+ encoder_factory_(new cricket::FakeWebRtcVideoEncoderFactory),
+ decoder_factory_(new cricket::FakeWebRtcVideoDecoderFactory),
+ engine_(std::unique_ptr<cricket::WebRtcVideoEncoderFactory>(
+ encoder_factory_),
+ std::unique_ptr<cricket::WebRtcVideoDecoderFactory>(
+ decoder_factory_)),
+ last_ssrc_(0) {}
+
+ void SetUp() override {
+ channel_.reset(
+ engine_.CreateChannel(&fake_call_, GetMediaConfig(), VideoOptions()));
+ channel_->OnReadyToSend(true);
+ last_ssrc_ = 123;
+ }
+
+ protected:
+  // Configures a send channel with |codec| and |num_configured_streams|
+  // simulcast SSRCs, captures one frame of |capture_width|x|capture_height|,
+  // and verifies that the resulting webrtc::VideoStream configuration matches
+  // what GetSimulcastConfig() (conference mode) or the single-stream bitrate
+  // table below (non-conference) predicts. |expected_num_streams| is the
+  // number of encoder streams expected after reconfiguration; |screenshare|
+  // marks the source as screen content.
+  void VerifySimulcastSettings(const VideoCodec& codec,
+                               int capture_width,
+                               int capture_height,
+                               size_t num_configured_streams,
+                               size_t expected_num_streams,
+                               bool screenshare,
+                               bool conference_mode) {
+    cricket::VideoSendParameters parameters;
+    parameters.codecs.push_back(codec);
+    parameters.conference_mode = conference_mode;
+    ASSERT_TRUE(channel_->SetSendParameters(parameters));
+
+    std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
+    RTC_DCHECK(num_configured_streams <= ssrcs.size());
+    ssrcs.resize(num_configured_streams);
+
+    AddSendStream(CreateSimStreamParams("cname", ssrcs));
+    // Send a full-size frame to trigger a stream reconfiguration to use all
+    // expected simulcast layers.
+    cricket::FakeVideoCapturer capturer;
+    VideoOptions options;
+    if (screenshare)
+      options.is_screencast = screenshare;
+    EXPECT_TRUE(
+        channel_->SetVideoSend(ssrcs.front(), true, &options, &capturer));
+    // Fetch the latest stream since SetVideoSend() may recreate it if the
+    // screen content setting is changed.
+    FakeVideoSendStream* stream = fake_call_.GetVideoSendStreams().front();
+    EXPECT_EQ(cricket::CS_RUNNING,
+              capturer.Start(cricket::VideoFormat(
+                  capture_width, capture_height,
+                  cricket::VideoFormat::FpsToInterval(30),
+                  cricket::FOURCC_I420)));
+    channel_->SetSend(true);
+    EXPECT_TRUE(capturer.CaptureFrame());
+
+    std::vector<webrtc::VideoStream> video_streams = stream->GetVideoStreams();
+    ASSERT_EQ(expected_num_streams, video_streams.size());
+
+    // Build the expected webrtc::VideoStream list to compare against.
+    std::vector<webrtc::VideoStream> expected_streams;
+    if (conference_mode) {
+      expected_streams = GetSimulcastConfig(
+          num_configured_streams, capture_width, capture_height, 0,
+          kDefaultQpMax, kDefaultVideoMaxFramerate, screenshare);
+      if (screenshare) {
+        for (const webrtc::VideoStream& stream : expected_streams) {
+          // Never scale screen content.
+          EXPECT_EQ(stream.width, capture_width);
+          EXPECT_EQ(stream.height, capture_height);
+        }
+      }
+    } else {
+      // Without conference mode a single stream is expected; its max bitrate
+      // follows a resolution-based table.
+      webrtc::VideoStream stream;
+      stream.width = capture_width;
+      stream.height = capture_height;
+      stream.max_framerate = kDefaultVideoMaxFramerate;
+      stream.min_bitrate_bps = cricket::kMinVideoBitrateBps;
+      int max_bitrate_kbps;
+      if (capture_width * capture_height <= 320 * 240) {
+        max_bitrate_kbps = 600;
+      } else if (capture_width * capture_height <= 640 * 480) {
+        max_bitrate_kbps = 1700;
+      } else if (capture_width * capture_height <= 960 * 540) {
+        max_bitrate_kbps = 2000;
+      } else {
+        max_bitrate_kbps = 2500;
+      }
+      stream.target_bitrate_bps = stream.max_bitrate_bps =
+          max_bitrate_kbps * 1000;
+      stream.max_qp = kDefaultQpMax;
+      expected_streams.push_back(stream);
+    }
+
+    ASSERT_EQ(expected_streams.size(), video_streams.size());
+
+    // Compare each configured stream field-by-field. The bitrate sum mirrors
+    // how the allocator totals layers: target bitrate for all layers except
+    // the last, which contributes its max bitrate.
+    // NOTE(review): |total_max_bitrate_bps| is accumulated but never asserted
+    // against anything.
+    size_t num_streams = video_streams.size();
+    int total_max_bitrate_bps = 0;
+    for (size_t i = 0; i < num_streams; ++i) {
+      EXPECT_EQ(expected_streams[i].width, video_streams[i].width);
+      EXPECT_EQ(expected_streams[i].height, video_streams[i].height);
+
+      EXPECT_GT(video_streams[i].max_framerate, 0);
+      EXPECT_EQ(expected_streams[i].max_framerate,
+                video_streams[i].max_framerate);
+
+      EXPECT_GT(video_streams[i].min_bitrate_bps, 0);
+      EXPECT_EQ(expected_streams[i].min_bitrate_bps,
+                video_streams[i].min_bitrate_bps);
+
+      EXPECT_GT(video_streams[i].target_bitrate_bps, 0);
+      EXPECT_EQ(expected_streams[i].target_bitrate_bps,
+                video_streams[i].target_bitrate_bps);
+
+      EXPECT_GT(video_streams[i].max_bitrate_bps, 0);
+      EXPECT_EQ(expected_streams[i].max_bitrate_bps,
+                video_streams[i].max_bitrate_bps);
+
+      EXPECT_GT(video_streams[i].max_qp, 0);
+      EXPECT_EQ(expected_streams[i].max_qp, video_streams[i].max_qp);
+
+      // Temporal layers are only expected in conference mode.
+      EXPECT_EQ(!conference_mode,
+                expected_streams[i].temporal_layer_thresholds_bps.empty());
+      EXPECT_EQ(expected_streams[i].temporal_layer_thresholds_bps,
+                video_streams[i].temporal_layer_thresholds_bps);
+
+      if (i == num_streams - 1) {
+        total_max_bitrate_bps += video_streams[i].max_bitrate_bps;
+      } else {
+        total_max_bitrate_bps += video_streams[i].target_bitrate_bps;
+      }
+    }
+
+    // Detach the capturer so it can safely go out of scope.
+    EXPECT_TRUE(channel_->SetVideoSend(ssrcs.front(), true, nullptr, nullptr));
+  }
+
+  // Adds a send stream with a fresh legacy SSRC and returns the fake stream
+  // created for it.
+  FakeVideoSendStream* AddSendStream() {
+    return AddSendStream(StreamParams::CreateLegacy(last_ssrc_++));
+  }
+
+  // Adds a send stream for |sp| and returns the FakeVideoSendStream the fake
+  // call created; EXPECTs that exactly one new stream appeared.
+  FakeVideoSendStream* AddSendStream(const StreamParams& sp) {
+    size_t num_streams =
+        fake_call_.GetVideoSendStreams().size();
+    EXPECT_TRUE(channel_->AddSendStream(sp));
+    std::vector<FakeVideoSendStream*> streams =
+        fake_call_.GetVideoSendStreams();
+    EXPECT_EQ(num_streams + 1, streams.size());
+    return streams[streams.size() - 1];
+  }
+
+  // Returns all send streams currently known to the fake call.
+  std::vector<FakeVideoSendStream*> GetFakeSendStreams() {
+    return fake_call_.GetVideoSendStreams();
+  }
+
+  // Adds a receive stream with a fresh legacy SSRC and returns the fake
+  // stream created for it.
+  FakeVideoReceiveStream* AddRecvStream() {
+    return AddRecvStream(StreamParams::CreateLegacy(last_ssrc_++));
+  }
+
+  // Adds a receive stream for |sp| and returns the fake stream the call
+  // created for it; EXPECTs that exactly one new stream appeared.
+  FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) {
+    const size_t prev_count = fake_call_.GetVideoReceiveStreams().size();
+    EXPECT_TRUE(channel_->AddRecvStream(sp));
+    std::vector<FakeVideoReceiveStream*> streams =
+        fake_call_.GetVideoReceiveStreams();
+    EXPECT_EQ(prev_count + 1, streams.size());
+    return streams.back();
+  }
+
+  // No-op event log handed to the engine/call machinery.
+  webrtc::RtcEventLogNullImpl event_log_;
+  // Fake webrtc::Call that records the send/receive streams created above.
+  FakeCall fake_call_;
+  // NOTE(review): raw pointers, presumably owned by |engine_| — confirm.
+  cricket::FakeWebRtcVideoEncoderFactory* encoder_factory_;
+  cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_;
+  WebRtcVideoEngine engine_;
+  std::unique_ptr<VideoMediaChannel> channel_;
+  // Next SSRC handed out by AddSendStream()/AddRecvStream().
+  uint32_t last_ssrc_;
+};
+
+// 640x360 with two configured layers should produce both layers.
+TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWith2SimulcastStreams) {
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 640, 360, 2, 2, false,
+                          true);
+}
+
+// 720p with three configured layers should produce all three layers.
+TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWith3SimulcastStreams) {
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 1280, 720, 3, 3, false,
+                          true);
+}
+
+// Test that we normalize send codec format size in simulcast.
+TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWithOddSizeInSimulcast) {
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 541, 271, 2, 2, false,
+                          true);
+}
+
+// Screenshare outside conference mode collapses to a single stream.
+TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsForScreenshare) {
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 1280, 720, 3, 1, true,
+                          false);
+}
+
+// Conference-mode screenshare still uses one stream while the simulcast
+// screenshare field trial is disabled.
+TEST_F(WebRtcVideoChannelSimulcastTest,
+       SetSendCodecsForConferenceModeScreenshare) {
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 1280, 720, 3, 1, true,
+                          true);
+}
+
+// With the field trial enabled, conference screenshare uses two streams.
+TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsForSimulcastScreenshare) {
+  webrtc::test::ScopedFieldTrials override_field_trials_(
+      "WebRTC-SimulcastScreenshare/Enabled/");
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 1280, 720, 3, 2, true,
+                          true);
+}
+
+// The field trial alone is not enough: conference mode is also required for
+// simulcast screenshare.
+TEST_F(WebRtcVideoChannelSimulcastTest,
+       NoSimulcastScreenshareWithoutConference) {
+  webrtc::test::ScopedFieldTrials override_field_trials_(
+      "WebRTC-SimulcastScreenshare/Enabled/");
+  VerifySimulcastSettings(cricket::VideoCodec("VP8"), 1280, 720, 3, 1, true,
+                          false);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvoe.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvoe.h
new file mode 100644
index 0000000000..d303d30ccd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvoe.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVOE_H_
+#define MEDIA_ENGINE_WEBRTCVOE_H_
+
+#include <memory>
+
+#include "common_types.h" // NOLINT(build/include)
+#include "modules/audio_device/include/audio_device.h"
+#include "voice_engine/include/voe_base.h"
+#include "voice_engine/include/voe_errors.h"
+
+namespace cricket {
+// automatically handles lifetime of WebRtc VoiceEngine
+class scoped_voe_engine {
+ public:
+  explicit scoped_voe_engine(webrtc::VoiceEngine* e) : ptr(e) {}
+  // Copying is disallowed: two wrappers owning the same engine would each
+  // call webrtc::VoiceEngine::Delete() on it, deleting it twice.
+  scoped_voe_engine(const scoped_voe_engine&) = delete;
+  scoped_voe_engine& operator=(const scoped_voe_engine&) = delete;
+  // RTC_DCHECK, to ensure that there are no leaks at shutdown
+  ~scoped_voe_engine() {
+    if (ptr) {
+      const bool success = webrtc::VoiceEngine::Delete(ptr);
+      RTC_DCHECK(success);
+    }
+  }
+  // Releases the current pointer.
+  void reset() {
+    if (ptr) {
+      const bool success = webrtc::VoiceEngine::Delete(ptr);
+      RTC_DCHECK(success);
+      ptr = NULL;
+    }
+  }
+  webrtc::VoiceEngine* get() const { return ptr; }
+
+ private:
+  webrtc::VoiceEngine* ptr;  // Owned; deleted in reset()/destructor.
+};
+
+// unique_ptr-like class to handle obtaining and releasing WebRTC interface
+// pointers.
+template <class T>
+class scoped_voe_ptr {
+ public:
+  explicit scoped_voe_ptr(const scoped_voe_engine& e)
+      : ptr(T::GetInterface(e.get())) {}
+  explicit scoped_voe_ptr(T* p) : ptr(p) {}
+  // Copying is disallowed: two wrappers holding the same interface would
+  // each call Release() on it, releasing it twice.
+  scoped_voe_ptr(const scoped_voe_ptr&) = delete;
+  scoped_voe_ptr& operator=(const scoped_voe_ptr&) = delete;
+  ~scoped_voe_ptr() {
+    if (ptr) ptr->Release();
+  }
+  T* operator->() const { return ptr; }
+  T* get() const { return ptr; }
+
+  // Releases the current pointer.
+  void reset() {
+    if (ptr) {
+      ptr->Release();
+      ptr = NULL;
+    }
+  }
+
+ private:
+  T* ptr;  // Owned; Release()d in reset()/destructor.
+};
+
+// Utility class for aggregating the various WebRTC interface.
+// Fake implementations can also be injected for testing.
+class VoEWrapper {
+ public:
+  // Creates and owns a real VoiceEngine plus its VoEBase interface.
+  VoEWrapper()
+      : engine_(webrtc::VoiceEngine::Create()), base_(engine_) {
+  }
+  // Test constructor: wraps an externally supplied VoEBase; engine() will
+  // return null in this configuration.
+  explicit VoEWrapper(webrtc::VoEBase* base) : engine_(NULL), base_(base) {}
+  ~VoEWrapper() {}
+  // May return null when constructed for testing without a real engine.
+  webrtc::VoiceEngine* engine() const { return engine_.get(); }
+  webrtc::VoEBase* base() const { return base_.get(); }
+
+ private:
+  scoped_voe_engine engine_;               // Deletes the engine on destruction.
+  scoped_voe_ptr<webrtc::VoEBase> base_;   // Release()s the interface.
+};
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVOE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.cc
new file mode 100644
index 0000000000..b02f2f5f15
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.cc
@@ -0,0 +1,2365 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_WEBRTC_VOICE
+
+#include "media/engine/webrtcvoiceengine.h"
+
+#include <algorithm>
+#include <cstdio>
+#include <functional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/call/audio_sink.h"
+#include "media/base/audiosource.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/streamparams.h"
+#include "media/engine/adm_helpers.h"
+#include "media/engine/apm_helpers.h"
+#include "media/engine/payload_type_mapper.h"
+#include "media/engine/webrtcmediaengine.h"
+#include "media/engine/webrtcvoe.h"
+#include "modules/audio_device/audio_device_impl.h"
+#include "modules/audio_mixer/audio_mixer_impl.h"
+#include "modules/audio_processing/aec_dump/aec_dump_factory.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/base64.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/race_checker.h"
+#include "rtc_base/stringencode.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+#include "voice_engine/transmit_mixer.h"
+
+namespace cricket {
+namespace {
+
+// Upper bound on receive streams created for unsignaled SSRCs.
+constexpr size_t kMaxUnsignaledRecvStreams = 4;
+
+// RTP history kept for NACK retransmission, in milliseconds.
+constexpr int kNackRtpHistoryMs = 5000;
+
+// Check to verify that the define for the intelligibility enhancer is properly
+// set.
+#if !defined(WEBRTC_INTELLIGIBILITY_ENHANCER) || \
+    (WEBRTC_INTELLIGIBILITY_ENHANCER != 0 &&     \
+     WEBRTC_INTELLIGIBILITY_ENHANCER != 1)
+#error "Set WEBRTC_INTELLIGIBILITY_ENHANCER to either 0 or 1"
+#endif
+
+// For SendSideBwe, Opus bitrate should be in the range between 6000 and 32000.
+const int kOpusMinBitrateBps = 6000;
+const int kOpusBitrateFbBps = 32000;
+
+// Default audio dscp value.
+// See http://tools.ietf.org/html/rfc2474 for details.
+// See also http://tools.ietf.org/html/draft-jennings-rtcweb-qos-00
+const rtc::DiffServCodePoint kAudioDscpValue = rtc::DSCP_EF;
+
+const int kMinTelephoneEventCode = 0;  // RFC4733 (Section 2.3.1)
+const int kMaxTelephoneEventCode = 255;
+
+// Valid RTP payload type range (7-bit field, RFC 3550).
+const int kMinPayloadType = 0;
+const int kMaxPayloadType = 127;
+
+// Forwards audio sink callbacks to a wrapped webrtc::AudioSinkInterface
+// without taking ownership of it.
+class ProxySink : public webrtc::AudioSinkInterface {
+ public:
+  // |sink| must be non-null (DCHECKed).
+  explicit ProxySink(AudioSinkInterface* sink) : sink_(sink) {
+    RTC_DCHECK(sink);
+  }
+
+  void OnData(const Data& audio) override { sink_->OnData(audio); }
+
+ private:
+  webrtc::AudioSinkInterface* sink_;  // Not owned.
+};
+
+// A voice stream must carry exactly one SSRC; logs and returns false
+// otherwise.
+bool ValidateStreamParams(const StreamParams& sp) {
+  const size_t num_ssrcs = sp.ssrcs.size();
+  if (num_ssrcs == 1) {
+    return true;
+  }
+  if (num_ssrcs == 0) {
+    RTC_LOG(LS_ERROR) << "No SSRCs in stream parameters: " << sp.ToString();
+  } else {
+    RTC_LOG(LS_ERROR) << "Multiple SSRCs in stream parameters: "
+                      << sp.ToString();
+  }
+  return false;
+}
+
+// Dumps an AudioCodec in RFC 2327-ish format.
+std::string ToString(const AudioCodec& codec) {
+  std::stringstream os;
+  os << codec.name << "/" << codec.clockrate << "/" << codec.channels;
+  if (!codec.params.empty()) {
+    os << " {";
+    for (const auto& kv : codec.params) {
+      os << " " << kv.first << "=" << kv.second;
+    }
+    os << " }";
+  }
+  os << " (" << codec.id << ")";
+  return os.str();
+}
+
+// Case-insensitive comparison of |codec|'s name against |ref_name|.
+bool IsCodec(const AudioCodec& codec, const char* ref_name) {
+  return _stricmp(codec.name.c_str(), ref_name) == 0;
+}
+
+// Searches |codecs| for the first entry matching |codec|. On success,
+// optionally copies the match into |found_codec| and returns true.
+bool FindCodec(const std::vector<AudioCodec>& codecs,
+               const AudioCodec& codec,
+               AudioCodec* found_codec) {
+  auto it = std::find_if(
+      codecs.begin(), codecs.end(),
+      [&codec](const AudioCodec& candidate) { return candidate.Matches(codec); });
+  if (it == codecs.end()) {
+    return false;
+  }
+  if (found_codec != NULL) {
+    *found_codec = *it;
+  }
+  return true;
+}
+
+// Returns true iff no two codecs in |codecs| share a payload type.
+bool VerifyUniquePayloadTypes(const std::vector<AudioCodec>& codecs) {
+  if (codecs.empty()) {
+    return true;
+  }
+  // Sort the payload types and look for equal neighbors.
+  std::vector<int> payload_types;
+  payload_types.reserve(codecs.size());
+  for (const AudioCodec& codec : codecs) {
+    payload_types.push_back(codec.id);
+  }
+  std::sort(payload_types.begin(), payload_types.end());
+  return std::adjacent_find(payload_types.begin(), payload_types.end()) ==
+         payload_types.end();
+}
+
+// Returns the audio network adaptor config string iff the adaptor is both
+// enabled and configured in |options|; rtc::nullopt otherwise.
+rtc::Optional<std::string> GetAudioNetworkAdaptorConfig(
+    const AudioOptions& options) {
+  if (options.audio_network_adaptor && *options.audio_network_adaptor &&
+      options.audio_network_adaptor_config) {
+    // Turn on audio network adaptor only when |options_.audio_network_adaptor|
+    // equals true and |options_.audio_network_adaptor_config| has a value.
+    return options.audio_network_adaptor_config;
+  }
+  return rtc::nullopt;
+}
+
+// Assembles an AudioState::Config from the engine's components. When no
+// mixer was injected, a default AudioMixerImpl is created.
+webrtc::AudioState::Config MakeAudioStateConfig(
+    VoEWrapper* voe_wrapper,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+    rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing) {
+  if (!audio_mixer) {
+    audio_mixer = webrtc::AudioMixerImpl::Create();
+  }
+  webrtc::AudioState::Config config;
+  config.voice_engine = voe_wrapper->engine();
+  config.audio_mixer = audio_mixer;
+  config.audio_processing = audio_processing;
+  return config;
+}
+
+// |max_send_bitrate_bps| is the bitrate from "b=" in SDP.
+// |rtp_max_bitrate_bps| is the bitrate from RtpSender::SetParameters.
+// Returns the send bitrate to use for |spec|, or rtc::nullopt when the
+// requested bitrate is below the codec's minimum.
+rtc::Optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
+                                      rtc::Optional<int> rtp_max_bitrate_bps,
+                                      const webrtc::AudioCodecSpec& spec) {
+  // If application-configured bitrate is set, take minimum of that and SDP
+  // bitrate.
+  const int bps =
+      rtp_max_bitrate_bps
+          ? webrtc::MinPositive(max_send_bitrate_bps, *rtp_max_bitrate_bps)
+          : max_send_bitrate_bps;
+  if (bps <= 0) {
+    // No positive constraint given; fall back to the codec's default.
+    return spec.info.default_bitrate_bps;
+  }
+
+  if (bps < spec.info.min_bitrate_bps) {
+    // The requested bitrate is below the codec's minimum; fail.
+    RTC_LOG(LS_ERROR) << "Failed to set codec " << spec.format.name
+                      << " to bitrate " << bps << " bps"
+                      << ", requires at least " << spec.info.min_bitrate_bps
+                      << " bps.";
+    return rtc::nullopt;
+  }
+
+  if (spec.info.HasFixedBitrate()) {
+    // Fixed-bitrate codecs ignore the request (beyond the minimum check).
+    return spec.info.default_bitrate_bps;
+  } else {
+    // If codec is multi-rate then just set the bitrate, capped at the
+    // codec's maximum.
+    return std::min(bps, spec.info.max_bitrate_bps);
+  }
+}
+
+} // namespace
+
+// Public constructor: delegates to the internal constructor with a null
+// VoEWrapper; Init() creates one on the worker thread.
+WebRtcVoiceEngine::WebRtcVoiceEngine(
+    webrtc::AudioDeviceModule* adm,
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+    rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing)
+    : WebRtcVoiceEngine(adm,
+                        encoder_factory,
+                        decoder_factory,
+                        audio_mixer,
+                        audio_processing,
+                        nullptr) {}
+
+// Internal constructor; |voe_wrapper| may be injected for testing.
+WebRtcVoiceEngine::WebRtcVoiceEngine(
+    webrtc::AudioDeviceModule* adm,
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+    rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
+    VoEWrapper* voe_wrapper)
+    : adm_(adm),
+      encoder_factory_(encoder_factory),
+      decoder_factory_(decoder_factory),
+      audio_mixer_(audio_mixer),
+      apm_(audio_processing),
+      voe_wrapper_(voe_wrapper) {
+  // This may be called from any thread, so detach thread checkers.
+  worker_thread_checker_.DetachFromThread();
+  signal_thread_checker_.DetachFromThread();
+  RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::WebRtcVoiceEngine";
+  RTC_DCHECK(decoder_factory);
+  RTC_DCHECK(encoder_factory);
+  RTC_DCHECK(audio_processing);
+  // The rest of our initialization will happen in Init.
+}
+
+WebRtcVoiceEngine::~WebRtcVoiceEngine() {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::~WebRtcVoiceEngine";
+  // Only tear down what Init() actually set up.
+  if (initialized_) {
+    StopAecDump();
+    voe_wrapper_->base()->Terminate();
+
+    // Stop AudioDevice.
+    adm()->StopPlayout();
+    adm()->StopRecording();
+    adm()->RegisterAudioCallback(nullptr);
+    adm()->Terminate();
+  }
+}
+
+// One-time engine initialization on the worker thread: creates the task
+// queue and (if not injected) the VoEWrapper, collects the supported codec
+// lists, sets up the ADM/APM, applies the default engine options and wires
+// the ADM into the audio state.
+void WebRtcVoiceEngine::Init() {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::Init";
+
+  // TaskQueue expects to be created/destroyed on the same thread.
+  low_priority_worker_queue_.reset(
+      new rtc::TaskQueue("rtc-low-prio", rtc::TaskQueue::Priority::LOW));
+
+  // VoEWrapper needs to be created on the worker thread. It's expected to be
+  // null here unless it's being injected for testing.
+  if (!voe_wrapper_) {
+    voe_wrapper_.reset(new VoEWrapper());
+  }
+
+  // Load our audio codec lists.
+  RTC_LOG(LS_INFO) << "Supported send codecs in order of preference:";
+  send_codecs_ = CollectCodecs(encoder_factory_->GetSupportedEncoders());
+  for (const AudioCodec& codec : send_codecs_) {
+    RTC_LOG(LS_INFO) << ToString(codec);
+  }
+
+  RTC_LOG(LS_INFO) << "Supported recv codecs in order of preference:";
+  recv_codecs_ = CollectCodecs(decoder_factory_->GetSupportedDecoders());
+  for (const AudioCodec& codec : recv_codecs_) {
+    RTC_LOG(LS_INFO) << ToString(codec);
+  }
+
+  channel_config_.enable_voice_pacing = true;
+
+#if defined(WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE)
+  // No ADM supplied? Create a default one.
+  if (!adm_) {
+    adm_ = webrtc::AudioDeviceModule::Create(
+        webrtc::AudioDeviceModule::kPlatformDefaultAudio);
+  }
+#endif  // WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE
+  RTC_CHECK(adm());
+  webrtc::adm_helpers::Init(adm());
+  webrtc::apm_helpers::Init(apm());
+  RTC_CHECK_EQ(0, voe_wrapper_->base()->Init(adm(), apm(), decoder_factory_));
+  transmit_mixer_ = voe_wrapper_->base()->transmit_mixer();
+  RTC_DCHECK(transmit_mixer_);
+
+  // Save the default AGC configuration settings. This must happen before
+  // calling ApplyOptions or the default will be overwritten.
+  default_agc_config_ = webrtc::apm_helpers::GetAgcConfig(apm());
+
+  // Set default engine options.
+  {
+    AudioOptions options;
+    options.echo_cancellation = true;
+    options.auto_gain_control = true;
+    options.noise_suppression = true;
+    options.highpass_filter = true;
+    options.stereo_swapping = false;
+    options.audio_jitter_buffer_max_packets = 50;
+    options.audio_jitter_buffer_fast_accelerate = false;
+    options.typing_detection = true;
+    options.adjust_agc_delta = 0;
+    options.experimental_agc = false;
+    options.extended_filter_aec = false;
+    options.delay_agnostic_aec = false;
+    options.experimental_ns = false;
+    options.intelligibility_enhancer = false;
+    options.level_control = false;
+    options.residual_echo_detector = true;
+    // Renamed from |error|: ApplyOptions() returns true on success, so the
+    // old name inverted the meaning of the value being DCHECKed.
+    const bool success = ApplyOptions(options);
+    RTC_DCHECK(success);
+  }
+
+  // May be null for VoE injected for testing.
+  if (voe()->engine()) {
+    audio_state_ = webrtc::AudioState::Create(
+        MakeAudioStateConfig(voe(), audio_mixer_, apm_));
+
+    // Connect the ADM to our audio path.
+    adm()->RegisterAudioCallback(audio_state_->audio_transport());
+  }
+
+  initialized_ = true;
+}
+
+// Returns the AudioState created in Init() (null if Init() skipped it).
+rtc::scoped_refptr<webrtc::AudioState>
+    WebRtcVoiceEngine::GetAudioState() const {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  return audio_state_;
+}
+
+// Creates a new voice media channel attached to |call|. The caller takes
+// ownership of the returned channel.
+VoiceMediaChannel* WebRtcVoiceEngine::CreateChannel(
+    webrtc::Call* call,
+    const MediaConfig& config,
+    const AudioOptions& options) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  return new WebRtcVoiceMediaChannel(this, config, options, call);
+}
+
+// Applies |options_in| to the engine: normalizes platform-specific settings
+// (iOS/Android built-in audio effects replace the software ones), forwards
+// EC/AGC/NS/typing-detection settings to the APM/ADM, and records "sticky"
+// experiment flags in members so they persist across calls. Always returns
+// true; failures are handled internally.
+bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::ApplyOptions: "
+                   << options_in.ToString();
+  AudioOptions options = options_in;  // The options are modified below.
+
+  // Set and adjust echo canceller options.
+  // kEcConference is AEC with high suppression.
+  webrtc::EcModes ec_mode = webrtc::kEcConference;
+  if (options.aecm_generate_comfort_noise) {
+    RTC_LOG(LS_VERBOSE) << "Comfort noise explicitly set to "
+                        << *options.aecm_generate_comfort_noise
+                        << " (default is false).";
+  }
+
+#if defined(WEBRTC_IOS)
+  // On iOS, VPIO provides built-in EC.
+  options.echo_cancellation = false;
+  options.extended_filter_aec = false;
+  RTC_LOG(LS_INFO) << "Always disable AEC on iOS. Use built-in instead.";
+#elif defined(WEBRTC_ANDROID)
+  ec_mode = webrtc::kEcAecm;
+  options.extended_filter_aec = false;
+#endif
+
+  // Delay Agnostic AEC automatically turns on EC if not set except on iOS
+  // where the feature is not supported.
+  bool use_delay_agnostic_aec = false;
+#if !defined(WEBRTC_IOS)
+  if (options.delay_agnostic_aec) {
+    use_delay_agnostic_aec = *options.delay_agnostic_aec;
+    if (use_delay_agnostic_aec) {
+      options.echo_cancellation = true;
+      options.extended_filter_aec = true;
+      ec_mode = webrtc::kEcConference;
+    }
+  }
+#endif
+
+// Set and adjust noise suppressor options.
+#if defined(WEBRTC_IOS)
+  // On iOS, VPIO provides built-in NS.
+  options.noise_suppression = false;
+  options.typing_detection = false;
+  options.experimental_ns = false;
+  RTC_LOG(LS_INFO) << "Always disable NS on iOS. Use built-in instead.";
+#elif defined(WEBRTC_ANDROID)
+  options.typing_detection = false;
+  options.experimental_ns = false;
+#endif
+
+// Set and adjust gain control options.
+#if defined(WEBRTC_IOS)
+  // On iOS, VPIO provides built-in AGC.
+  options.auto_gain_control = false;
+  options.experimental_agc = false;
+  RTC_LOG(LS_INFO) << "Always disable AGC on iOS. Use built-in instead.";
+#elif defined(WEBRTC_ANDROID)
+  options.experimental_agc = false;
+#endif
+
+#if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID)
+  // Turn off the gain control if specified by the field trial.
+  // The purpose of the field trial is to reduce the amount of resampling
+  // performed inside the audio processing module on mobile platforms by
+  // whenever possible turning off the fixed AGC mode and the high-pass filter.
+  // (https://bugs.chromium.org/p/webrtc/issues/detail?id=6181).
+  if (webrtc::field_trial::IsEnabled(
+          "WebRTC-Audio-MinimizeResamplingOnMobile")) {
+    options.auto_gain_control = false;
+    RTC_LOG(LS_INFO) << "Disable AGC according to field trial.";
+    if (!(options.noise_suppression.value_or(false) ||
+          options.echo_cancellation.value_or(false))) {
+      // If possible, turn off the high-pass filter.
+      RTC_LOG(LS_INFO)
+          << "Disable high-pass filter in response to field trial.";
+      options.highpass_filter = false;
+    }
+  }
+#endif
+
+#if (WEBRTC_INTELLIGIBILITY_ENHANCER == 0)
+  // Hardcode the intelligibility enhancer to be off.
+  options.intelligibility_enhancer = false;
+#endif
+
+  if (options.echo_cancellation) {
+    // Check if platform supports built-in EC. Currently only supported on
+    // Android and in combination with Java based audio layer.
+    // TODO(henrika): investigate possibility to support built-in EC also
+    // in combination with Open SL ES audio.
+    const bool built_in_aec = adm()->BuiltInAECIsAvailable();
+    if (built_in_aec) {
+      // Built-in EC exists on this device and use_delay_agnostic_aec is not
+      // overriding it. Enable/Disable it according to the echo_cancellation
+      // audio option.
+      const bool enable_built_in_aec =
+          *options.echo_cancellation && !use_delay_agnostic_aec;
+      if (adm()->EnableBuiltInAEC(enable_built_in_aec) == 0 &&
+          enable_built_in_aec) {
+        // Disable internal software EC if built-in EC is enabled,
+        // i.e., replace the software EC with the built-in EC.
+        options.echo_cancellation = false;
+        RTC_LOG(LS_INFO)
+            << "Disabling EC since built-in EC will be used instead";
+      }
+    }
+    webrtc::apm_helpers::SetEcStatus(
+        apm(), *options.echo_cancellation, ec_mode);
+#if !defined(WEBRTC_ANDROID)
+    webrtc::apm_helpers::SetEcMetricsStatus(apm(), *options.echo_cancellation);
+#endif
+    if (ec_mode == webrtc::kEcAecm) {
+      bool cn = options.aecm_generate_comfort_noise.value_or(false);
+      webrtc::apm_helpers::SetAecmMode(apm(), cn);
+    }
+  }
+
+  if (options.auto_gain_control) {
+    bool built_in_agc_avaliable = adm()->BuiltInAGCIsAvailable();
+    if (built_in_agc_avaliable) {
+      if (adm()->EnableBuiltInAGC(*options.auto_gain_control) == 0 &&
+          *options.auto_gain_control) {
+        // Disable internal software AGC if built-in AGC is enabled,
+        // i.e., replace the software AGC with the built-in AGC.
+        options.auto_gain_control = false;
+        RTC_LOG(LS_INFO)
+            << "Disabling AGC since built-in AGC will be used instead";
+      }
+    }
+    webrtc::apm_helpers::SetAgcStatus(apm(), adm(), *options.auto_gain_control);
+  }
+
+  if (options.tx_agc_target_dbov || options.tx_agc_digital_compression_gain ||
+      options.tx_agc_limiter || options.adjust_agc_delta) {
+    // Override default_agc_config_. Generally, an unset option means "leave
+    // the VoE bits alone" in this function, so we want whatever is set to be
+    // stored as the new "default". If we didn't, then setting e.g.
+    // tx_agc_target_dbov would reset digital compression gain and limiter
+    // settings.
+    // Also, if we don't update default_agc_config_, then adjust_agc_delta
+    // would be an offset from the original values, and not whatever was set
+    // explicitly.
+    default_agc_config_.targetLeveldBOv = options.tx_agc_target_dbov.value_or(
+        default_agc_config_.targetLeveldBOv);
+    default_agc_config_.digitalCompressionGaindB =
+        options.tx_agc_digital_compression_gain.value_or(
+            default_agc_config_.digitalCompressionGaindB);
+    default_agc_config_.limiterEnable =
+        options.tx_agc_limiter.value_or(default_agc_config_.limiterEnable);
+
+    webrtc::AgcConfig config = default_agc_config_;
+    if (options.adjust_agc_delta) {
+      config.targetLeveldBOv -= *options.adjust_agc_delta;
+      RTC_LOG(LS_INFO) << "Adjusting AGC level from default -"
+                       << default_agc_config_.targetLeveldBOv << "dB to -"
+                       << config.targetLeveldBOv << "dB";
+    }
+    webrtc::apm_helpers::SetAgcConfig(apm(), config);
+  }
+
+  if (options.intelligibility_enhancer) {
+    intelligibility_enhancer_ = options.intelligibility_enhancer;
+  }
+  if (intelligibility_enhancer_ && *intelligibility_enhancer_) {
+    RTC_LOG(LS_INFO) << "Enabling NS when Intelligibility Enhancer is active.";
+    options.noise_suppression = intelligibility_enhancer_;
+  }
+
+  if (options.noise_suppression) {
+    if (adm()->BuiltInNSIsAvailable()) {
+      bool builtin_ns =
+          *options.noise_suppression &&
+          !(intelligibility_enhancer_ && *intelligibility_enhancer_);
+      if (adm()->EnableBuiltInNS(builtin_ns) == 0 && builtin_ns) {
+        // Disable internal software NS if built-in NS is enabled,
+        // i.e., replace the software NS with the built-in NS.
+        options.noise_suppression = false;
+        RTC_LOG(LS_INFO)
+            << "Disabling NS since built-in NS will be used instead";
+      }
+    }
+    webrtc::apm_helpers::SetNsStatus(apm(), *options.noise_suppression);
+  }
+
+  if (options.stereo_swapping) {
+    RTC_LOG(LS_INFO) << "Stereo swapping enabled? " << *options.stereo_swapping;
+    transmit_mixer()->EnableStereoChannelSwapping(*options.stereo_swapping);
+  }
+
+  if (options.audio_jitter_buffer_max_packets) {
+    RTC_LOG(LS_INFO) << "NetEq capacity is "
+                     << *options.audio_jitter_buffer_max_packets;
+    channel_config_.acm_config.neteq_config.max_packets_in_buffer =
+        std::max(20, *options.audio_jitter_buffer_max_packets);
+  }
+  if (options.audio_jitter_buffer_fast_accelerate) {
+    RTC_LOG(LS_INFO) << "NetEq fast mode? "
+                     << *options.audio_jitter_buffer_fast_accelerate;
+    channel_config_.acm_config.neteq_config.enable_fast_accelerate =
+        *options.audio_jitter_buffer_fast_accelerate;
+  }
+
+  if (options.typing_detection) {
+    RTC_LOG(LS_INFO) << "Typing detection is enabled? "
+                     << *options.typing_detection;
+    webrtc::apm_helpers::SetTypingDetectionStatus(
+        apm(), *options.typing_detection);
+  }
+
+  webrtc::Config config;
+
+  // The flags below are "sticky": once set via options they are remembered in
+  // member variables and re-applied on every subsequent ApplyOptions() call.
+  if (options.delay_agnostic_aec)
+    delay_agnostic_aec_ = options.delay_agnostic_aec;
+  if (delay_agnostic_aec_) {
+    RTC_LOG(LS_INFO) << "Delay agnostic aec is enabled? "
+                     << *delay_agnostic_aec_;
+    config.Set<webrtc::DelayAgnostic>(
+        new webrtc::DelayAgnostic(*delay_agnostic_aec_));
+  }
+
+  if (options.extended_filter_aec) {
+    extended_filter_aec_ = options.extended_filter_aec;
+  }
+  if (extended_filter_aec_) {
+    RTC_LOG(LS_INFO) << "Extended filter aec is enabled? "
+                     << *extended_filter_aec_;
+    config.Set<webrtc::ExtendedFilter>(
+        new webrtc::ExtendedFilter(*extended_filter_aec_));
+  }
+
+  if (options.experimental_ns) {
+    experimental_ns_ = options.experimental_ns;
+  }
+  if (experimental_ns_) {
+    RTC_LOG(LS_INFO) << "Experimental ns is enabled? " << *experimental_ns_;
+    config.Set<webrtc::ExperimentalNs>(
+        new webrtc::ExperimentalNs(*experimental_ns_));
+  }
+
+  if (intelligibility_enhancer_) {
+    RTC_LOG(LS_INFO) << "Intelligibility Enhancer is enabled? "
+                     << *intelligibility_enhancer_;
+    config.Set<webrtc::Intelligibility>(
+        new webrtc::Intelligibility(*intelligibility_enhancer_));
+  }
+
+  if (options.level_control) {
+    level_control_ = options.level_control;
+  }
+
+  webrtc::AudioProcessing::Config apm_config = apm()->GetConfig();
+
+  RTC_LOG(LS_INFO) << "Level control: "
+                   << (!!level_control_ ? *level_control_ : -1);
+  if (level_control_) {
+    apm_config.level_controller.enabled = *level_control_;
+    if (options.level_control_initial_peak_level_dbfs) {
+      apm_config.level_controller.initial_peak_level_dbfs =
+          *options.level_control_initial_peak_level_dbfs;
+    }
+  }
+
+  if (options.highpass_filter) {
+    apm_config.high_pass_filter.enabled = *options.highpass_filter;
+  }
+
+  if (options.residual_echo_detector) {
+    apm_config.residual_echo_detector.enabled = *options.residual_echo_detector;
+  }
+
+  apm()->SetExtraOptions(config);
+  apm()->ApplyConfig(apm_config);
+  return true;
+}
+
+// TODO(solenberg): Remove, once AudioMonitor is gone.
+// Returns the current capture audio level from the transmit mixer; the
+// int8_t source plus the DCHECK constrain the result to [0, 127].
+int WebRtcVoiceEngine::GetInputLevel() {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  int8_t level = transmit_mixer()->AudioLevel();
+  RTC_DCHECK_LE(0, level);
+  return level;
+}
+
+// Send codec list collected in Init(), in order of preference.
+const std::vector<AudioCodec>& WebRtcVoiceEngine::send_codecs() const {
+  RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+  return send_codecs_;
+}
+
+// Receive codec list collected in Init(), in order of preference.
+const std::vector<AudioCodec>& WebRtcVoiceEngine::recv_codecs() const {
+  RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+  return recv_codecs_;
+}
+
+// Returns the supported audio RTP header extensions. The transport sequence
+// number extension is only offered when the send-side BWE field trial is
+// enabled.
+RtpCapabilities WebRtcVoiceEngine::GetCapabilities() const {
+  RTC_DCHECK(signal_thread_checker_.CalledOnValidThread());
+  RtpCapabilities capabilities;
+  capabilities.header_extensions.push_back(
+      webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri,
+                           webrtc::RtpExtension::kAudioLevelDefaultId));
+  if (webrtc::field_trial::IsEnabled("WebRTC-Audio-SendSideBwe")) {
+    capabilities.header_extensions.push_back(webrtc::RtpExtension(
+        webrtc::RtpExtension::kTransportSequenceNumberUri,
+        webrtc::RtpExtension::kTransportSequenceNumberDefaultId));
+  }
+  return capabilities;
+}
+
+// Registers a media channel with the engine. |channel| must be non-null and
+// is not owned by the engine.
+void WebRtcVoiceEngine::RegisterChannel(WebRtcVoiceMediaChannel* channel) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(channel);
+  channels_.push_back(channel);
+}
+
+// Unregisters a previously registered channel; DCHECKs that it was actually
+// registered.
+void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel* channel) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  auto it = std::find(channels_.begin(), channels_.end(), channel);
+  RTC_DCHECK(it != channels_.end());
+  channels_.erase(it);
+}
+
+bool WebRtcVoiceEngine::StartAecDump(rtc::PlatformFile file,
+ int64_t max_size_bytes) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ auto aec_dump = webrtc::AecDumpFactory::Create(
+ file, max_size_bytes, low_priority_worker_queue_.get());
+ if (!aec_dump) {
+ return false;
+ }
+ apm()->AttachAecDump(std::move(aec_dump));
+ return true;
+}
+
+// Starts an AEC dump to the named file with no size limit (-1). Unlike the
+// PlatformFile overload, failure to create the dump is silently ignored.
+// Worker thread only.
+void WebRtcVoiceEngine::StartAecDump(const std::string& filename) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+
+ auto aec_dump = webrtc::AecDumpFactory::Create(
+ filename, -1, low_priority_worker_queue_.get());
+ if (aec_dump) {
+ apm()->AttachAecDump(std::move(aec_dump));
+ }
+}
+
+// Stops any in-progress AEC dump by detaching it from the audio processing
+// module. Safe to call when no dump is active (delegated to APM). Worker
+// thread only.
+void WebRtcVoiceEngine::StopAecDump() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ apm()->DetachAecDump();
+}
+
+// Creates a new VoiceEngine channel using the engine's shared channel
+// config and returns its id. Worker thread only.
+int WebRtcVoiceEngine::CreateVoEChannel() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return voe_wrapper_->base()->CreateChannel(channel_config_);
+}
+
+// Accessor for the audio device module. Worker thread only; DCHECKs that
+// the ADM has been set.
+webrtc::AudioDeviceModule* WebRtcVoiceEngine::adm() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(adm_);
+ return adm_.get();
+}
+
+// Accessor for the audio processing module. Worker thread only; DCHECKs
+// that the APM has been set.
+webrtc::AudioProcessing* WebRtcVoiceEngine::apm() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(apm_);
+ return apm_.get();
+}
+
+// Accessor for the transmit mixer. Worker thread only; DCHECKs that the
+// mixer has been set.
+webrtc::voe::TransmitMixer* WebRtcVoiceEngine::transmit_mixer() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(transmit_mixer_);
+ return transmit_mixer_;
+}
+
+// Converts a list of codec specs into the AudioCodecs list exposed to
+// callers, assigning payload types via PayloadTypeMapper. Ordering of the
+// result: "proper" audio codecs first (in |specs| order), then CN codecs,
+// then telephone-event codecs — the latter two sorted by descending
+// clockrate (the maps use std::greater<int>). Formats that can't be
+// assigned a payload type are logged and dropped.
+AudioCodecs WebRtcVoiceEngine::CollectCodecs(
+ const std::vector<webrtc::AudioCodecSpec>& specs) const {
+ PayloadTypeMapper mapper;
+ AudioCodecs out;
+
+ // Only generate CN payload types for these clockrates:
+ std::map<int, bool, std::greater<int>> generate_cn = {{ 8000, false },
+ { 16000, false },
+ { 32000, false }};
+ // Only generate telephone-event payload types for these clockrates:
+ std::map<int, bool, std::greater<int>> generate_dtmf = {{ 8000, false },
+ { 16000, false },
+ { 32000, false },
+ { 48000, false }};
+
+ // Maps |format| to an AudioCodec with an assigned payload type, appending
+ // it to |out| when |out| is non-null. Returns the codec (empty on
+ // failure) so callers can post-process it before insertion.
+ auto map_format = [&mapper](const webrtc::SdpAudioFormat& format,
+ AudioCodecs* out) {
+ rtc::Optional<AudioCodec> opt_codec = mapper.ToAudioCodec(format);
+ if (opt_codec) {
+ if (out) {
+ out->push_back(*opt_codec);
+ }
+ } else {
+ RTC_LOG(LS_ERROR) << "Unable to assign payload type to format: "
+ << format;
+ }
+
+ return opt_codec;
+ };
+
+ for (const auto& spec : specs) {
+ // We need to do some extra stuff before adding the main codecs to out.
+ rtc::Optional<AudioCodec> opt_codec = map_format(spec.format, nullptr);
+ if (opt_codec) {
+ AudioCodec& codec = *opt_codec;
+ if (spec.info.supports_network_adaption) {
+ codec.AddFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
+ }
+
+ if (spec.info.allow_comfort_noise) {
+ // Generate a CN entry if the decoder allows it and we support the
+ // clockrate.
+ auto cn = generate_cn.find(spec.format.clockrate_hz);
+ if (cn != generate_cn.end()) {
+ cn->second = true;
+ }
+ }
+
+ // Generate a telephone-event entry if we support the clockrate.
+ auto dtmf = generate_dtmf.find(spec.format.clockrate_hz);
+ if (dtmf != generate_dtmf.end()) {
+ dtmf->second = true;
+ }
+
+ out.push_back(codec);
+ }
+ }
+
+ // Add CN codecs after "proper" audio codecs.
+ for (const auto& cn : generate_cn) {
+ if (cn.second) {
+ map_format({kCnCodecName, cn.first, 1}, &out);
+ }
+ }
+
+ // Add telephone-event codecs last.
+ for (const auto& dtmf : generate_dtmf) {
+ if (dtmf.second) {
+ map_format({kDtmfCodecName, dtmf.first, 1}, &out);
+ }
+ }
+
+ return out;
+}
+
+// Wraps one webrtc::AudioSendStream and its configuration. Owns the stream
+// (created in the ctor, destroyed in the dtor) and acts as the sink for an
+// AudioSource, forwarding captured audio into VoE. All methods run on the
+// worker thread except OnData(), which runs on the audio capture thread.
+class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
+ : public AudioSource::Sink {
+ public:
+ // NOTE(review): |track_id| is taken by value while |c_name| is a const
+ // reference — presumably an oversight; confirm before changing.
+ WebRtcAudioSendStream(
+ int ch,
+ webrtc::AudioTransport* voe_audio_transport,
+ uint32_t ssrc,
+ const std::string& c_name,
+ const std::string track_id,
+ const rtc::Optional<webrtc::AudioSendStream::Config::SendCodecSpec>&
+ send_codec_spec,
+ const std::vector<webrtc::RtpExtension>& extensions,
+ int max_send_bitrate_bps,
+ const rtc::Optional<std::string>& audio_network_adaptor_config,
+ webrtc::Call* call,
+ webrtc::Transport* send_transport,
+ const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory)
+ : voe_audio_transport_(voe_audio_transport),
+ call_(call),
+ config_(send_transport),
+ send_side_bwe_with_overhead_(
+ webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
+ max_send_bitrate_bps_(max_send_bitrate_bps),
+ rtp_parameters_(CreateRtpParametersWithOneEncoding()) {
+ RTC_DCHECK_GE(ch, 0);
+ // TODO(solenberg): Once we're not using FakeWebRtcVoiceEngine anymore:
+ // RTC_DCHECK(voe_audio_transport);
+ RTC_DCHECK(call);
+ RTC_DCHECK(encoder_factory);
+ config_.rtp.ssrc = ssrc;
+ config_.rtp.c_name = c_name;
+ config_.voe_channel_id = ch;
+ config_.rtp.extensions = extensions;
+ config_.audio_network_adaptor_config = audio_network_adaptor_config;
+ config_.encoder_factory = encoder_factory;
+ config_.track_id = track_id;
+ rtp_parameters_.encodings[0].ssrc = ssrc;
+
+ if (send_codec_spec) {
+ UpdateSendCodecSpec(*send_codec_spec);
+ }
+
+ stream_ = call_->CreateAudioSendStream(config_);
+ }
+
+ // Detaches from the source before destroying the underlying stream so no
+ // audio callbacks arrive during teardown.
+ ~WebRtcAudioSendStream() override {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ ClearSource();
+ call_->DestroyAudioSendStream(stream_);
+ }
+
+ // Applies a new send codec spec and reconfigures the stream.
+ void SetSendCodecSpec(
+ const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) {
+ UpdateSendCodecSpec(send_codec_spec);
+ ReconfigureAudioSendStream();
+ }
+
+ // Replaces the negotiated RTP header extensions and reconfigures.
+ void SetRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.rtp.extensions = extensions;
+ ReconfigureAudioSendStream();
+ }
+
+ // Sets the audio network adaptor config; no-op when unchanged. Also
+ // refreshes the allowed bitrate range since adaptor settings affect it.
+ void SetAudioNetworkAdaptorConfig(
+ const rtc::Optional<std::string>& audio_network_adaptor_config) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (config_.audio_network_adaptor_config == audio_network_adaptor_config) {
+ return;
+ }
+ config_.audio_network_adaptor_config = audio_network_adaptor_config;
+ UpdateAllowedBitrateRange();
+ ReconfigureAudioSendStream();
+ }
+
+ // Sets the channel-level max send bitrate. Returns false if |bps| can't
+ // be satisfied for the current codec (ComputeSendBitrate fails); only
+ // reconfigures the stream when the effective target rate changes.
+ bool SetMaxSendBitrate(int bps) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(config_.send_codec_spec);
+ RTC_DCHECK(audio_codec_spec_);
+ auto send_rate = ComputeSendBitrate(
+ bps, rtp_parameters_.encodings[0].max_bitrate_bps, *audio_codec_spec_);
+
+ if (!send_rate) {
+ return false;
+ }
+
+ max_send_bitrate_bps_ = bps;
+
+ if (send_rate != config_.send_codec_spec->target_bitrate_bps) {
+ config_.send_codec_spec->target_bitrate_bps = send_rate;
+ ReconfigureAudioSendStream();
+ }
+ return true;
+ }
+
+ // Forwards a DTMF event to the stream. Returns the stream's result.
+ bool SendTelephoneEvent(int payload_type, int payload_freq, int event,
+ int duration_ms) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->SendTelephoneEvent(payload_type, payload_freq, event,
+ duration_ms);
+ }
+
+ // Enables/disables sending; actual start/stop also depends on having a
+ // source and an active encoding (see UpdateSendState()).
+ void SetSend(bool send) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ send_ = send;
+ UpdateSendState();
+ }
+
+ void SetMuted(bool muted) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ stream_->SetMuted(muted);
+ muted_ = muted;
+ }
+
+ bool muted() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return muted_;
+ }
+
+ webrtc::AudioSendStream::Stats GetStats(bool has_remote_tracks) const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetStats(has_remote_tracks);
+ }
+
+ // Starts the sending by setting ourselves as a sink to the AudioSource to
+ // get data callbacks.
+ // This method is called on the libjingle worker thread.
+ // TODO(xians): Make sure Start() is called only once.
+ void SetSource(AudioSource* source) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(source);
+ if (source_) {
+ // Re-setting the same source is a no-op; a different source is a
+ // programming error.
+ RTC_DCHECK(source_ == source);
+ return;
+ }
+ source->SetSink(this);
+ source_ = source;
+ UpdateSendState();
+ }
+
+ // Stops sending by setting the sink of the AudioSource to nullptr. No data
+ // callback will be received after this method.
+ // This method is called on the libjingle worker thread.
+ void ClearSource() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (source_) {
+ source_->SetSink(nullptr);
+ source_ = nullptr;
+ }
+ UpdateSendState();
+ }
+
+ // AudioSource::Sink implementation.
+ // This method is called on the audio thread. The race checker documents
+ // that calls are serialized but not bound to one particular thread.
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) override {
+ RTC_CHECK_RUNS_SERIALIZED(&audio_capture_race_checker_);
+ RTC_DCHECK(voe_audio_transport_);
+ voe_audio_transport_->PushCaptureData(config_.voe_channel_id, audio_data,
+ bits_per_sample, sample_rate,
+ number_of_channels, number_of_frames);
+ }
+
+ // Callback from the |source_| when it is going away. In case Start() has
+ // never been called, this callback won't be triggered.
+ void OnClose() override {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ // Set |source_| to nullptr to make sure no more callback will get into
+ // the source.
+ source_ = nullptr;
+ UpdateSendState();
+ }
+
+ // Accessor to the VoE channel ID.
+ int channel() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return config_.voe_channel_id;
+ }
+
+ const webrtc::RtpParameters& rtp_parameters() const {
+ return rtp_parameters_;
+ }
+
+ // Rejects parameter sets that don't have exactly one encoding or that try
+ // to change the SSRC.
+ bool ValidateRtpParameters(const webrtc::RtpParameters& rtp_parameters) {
+ if (rtp_parameters.encodings.size() != 1) {
+ RTC_LOG(LS_ERROR)
+ << "Attempted to set RtpParameters without exactly one encoding";
+ return false;
+ }
+ if (rtp_parameters.encodings[0].ssrc != rtp_parameters_.encodings[0].ssrc) {
+ RTC_LOG(LS_ERROR) << "Attempted to set RtpParameters with modified SSRC";
+ return false;
+ }
+ return true;
+ }
+
+ // Applies new RTP parameters. Recomputes the target bitrate and
+ // reconfigures the stream if the per-encoding max bitrate changed;
+ // otherwise only the send state (active flag) is refreshed.
+ bool SetRtpParameters(const webrtc::RtpParameters& parameters) {
+ if (!ValidateRtpParameters(parameters)) {
+ return false;
+ }
+
+ rtc::Optional<int> send_rate;
+ if (audio_codec_spec_) {
+ send_rate = ComputeSendBitrate(max_send_bitrate_bps_,
+ parameters.encodings[0].max_bitrate_bps,
+ *audio_codec_spec_);
+ if (!send_rate) {
+ return false;
+ }
+ }
+
+ const rtc::Optional<int> old_rtp_max_bitrate =
+ rtp_parameters_.encodings[0].max_bitrate_bps;
+
+ rtp_parameters_ = parameters;
+
+ if (rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) {
+ // Reconfigure AudioSendStream with new bit rate.
+ if (send_rate) {
+ config_.send_codec_spec->target_bitrate_bps = send_rate;
+ }
+ UpdateAllowedBitrateRange();
+ ReconfigureAudioSendStream();
+ } else {
+ // parameters.encodings[0].active could have changed.
+ UpdateSendState();
+ }
+ return true;
+ }
+
+ private:
+ // Starts the stream only when all three hold: sending is enabled, a
+ // source is attached, and the single encoding is active; stops otherwise.
+ void UpdateSendState() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ RTC_DCHECK_EQ(1UL, rtp_parameters_.encodings.size());
+ if (send_ && source_ != nullptr && rtp_parameters_.encodings[0].active) {
+ stream_->Start();
+ } else { // !send_ || source_ == nullptr || !active
+ stream_->Stop();
+ }
+ }
+
+ // Derives config_.min/max_bitrate_bps. Only takes effect for Opus with
+ // the "WebRTC-Audio-SendSideBwe" field trial enabled; optionally widens
+ // the range by the estimated per-packet overhead when the
+ // "WebRTC-SendSideBwe-WithOverhead" trial is on.
+ void UpdateAllowedBitrateRange() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ const bool is_opus =
+ config_.send_codec_spec &&
+ !STR_CASE_CMP(config_.send_codec_spec->format.name.c_str(),
+ kOpusCodecName);
+ if (is_opus && webrtc::field_trial::IsEnabled("WebRTC-Audio-SendSideBwe")) {
+ config_.min_bitrate_bps = kOpusMinBitrateBps;
+
+ // This means that when RtpParameters is reset, we may change the
+ // encoder's bit rate immediately (through ReconfigureAudioSendStream()),
+ // meanwhile change the cap to the output of BWE.
+ config_.max_bitrate_bps =
+ rtp_parameters_.encodings[0].max_bitrate_bps
+ ? *rtp_parameters_.encodings[0].max_bitrate_bps
+ : kOpusBitrateFbBps;
+
+ // TODO(mflodman): Keep testing this and set proper values.
+ // Note: This is an early experiment currently only supported by Opus.
+ if (send_side_bwe_with_overhead_) {
+ const int max_packet_size_ms =
+ WEBRTC_OPUS_SUPPORT_120MS_PTIME ? 120 : 60;
+
+ // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12)
+ constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12;
+
+ int min_overhead_bps =
+ kOverheadPerPacket * 8 * 1000 / max_packet_size_ms;
+
+ // We assume that |config_.max_bitrate_bps| before the next line is
+ // a hard limit on the payload bitrate, so we add min_overhead_bps to
+ // it to ensure that, when overhead is deducted, the payload rate
+ // never goes beyond the limit.
+ // Note: this also means that if a higher overhead is forced, we
+ // cannot reach the limit.
+ // TODO(minyue): Reconsider this when the signaling to BWE is done
+ // through a dedicated API.
+ config_.max_bitrate_bps += min_overhead_bps;
+
+ // In contrast to max_bitrate_bps, we let min_bitrate_bps always be
+ // reachable.
+ config_.min_bitrate_bps += min_overhead_bps;
+ }
+ }
+ }
+
+ // Applies |send_codec_spec| to config_, caching the queried codec info in
+ // |audio_codec_spec_| and recomputing the target bitrate and allowed
+ // range. Does NOT reconfigure the stream — callers do that.
+ void UpdateSendCodecSpec(
+ const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.rtp.nack.rtp_history_ms =
+ send_codec_spec.nack_enabled ? kNackRtpHistoryMs : 0;
+ config_.send_codec_spec = send_codec_spec;
+ auto info =
+ config_.encoder_factory->QueryAudioEncoder(send_codec_spec.format);
+ RTC_DCHECK(info);
+ // If a specific target bitrate has been set for the stream, use that as
+ // the new default bitrate when computing send bitrate.
+ if (send_codec_spec.target_bitrate_bps) {
+ info->default_bitrate_bps = std::max(
+ info->min_bitrate_bps,
+ std::min(info->max_bitrate_bps, *send_codec_spec.target_bitrate_bps));
+ }
+
+ audio_codec_spec_.emplace(
+ webrtc::AudioCodecSpec{send_codec_spec.format, *info});
+
+ config_.send_codec_spec->target_bitrate_bps = ComputeSendBitrate(
+ max_send_bitrate_bps_, rtp_parameters_.encodings[0].max_bitrate_bps,
+ *audio_codec_spec_);
+
+ UpdateAllowedBitrateRange();
+ }
+
+ // Pushes the current config_ to the underlying stream.
+ void ReconfigureAudioSendStream() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ stream_->Reconfigure(config_);
+ }
+
+ rtc::ThreadChecker worker_thread_checker_;
+ rtc::RaceChecker audio_capture_race_checker_;
+ webrtc::AudioTransport* const voe_audio_transport_ = nullptr;
+ webrtc::Call* call_ = nullptr;
+ webrtc::AudioSendStream::Config config_;
+ const bool send_side_bwe_with_overhead_;
+ // The stream is owned by WebRtcAudioSendStream and may be reallocated if
+ // configuration changes.
+ webrtc::AudioSendStream* stream_ = nullptr;
+
+ // Raw pointer to AudioSource owned by LocalAudioTrackHandler.
+ // PeerConnection will make sure invalidating the pointer before the object
+ // goes away.
+ AudioSource* source_ = nullptr;
+ bool send_ = false;
+ bool muted_ = false;
+ int max_send_bitrate_bps_;
+ webrtc::RtpParameters rtp_parameters_;
+ rtc::Optional<webrtc::AudioCodecSpec> audio_codec_spec_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioSendStream);
+};
+
+// Wraps one webrtc::AudioReceiveStream and its configuration. Unlike the
+// send-stream wrapper, config changes here destroy and recreate the
+// underlying stream (see RecreateAudioReceiveStream()), preserving the
+// playout state across recreation. All methods run on the worker thread.
+class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
+ public:
+ WebRtcAudioReceiveStream(
+ int ch,
+ uint32_t remote_ssrc,
+ uint32_t local_ssrc,
+ bool use_transport_cc,
+ bool use_nack,
+ const std::string& sync_group,
+ const std::vector<webrtc::RtpExtension>& extensions,
+ webrtc::Call* call,
+ webrtc::Transport* rtcp_send_transport,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+ const std::map<int, webrtc::SdpAudioFormat>& decoder_map)
+ : call_(call), config_() {
+ RTC_DCHECK_GE(ch, 0);
+ RTC_DCHECK(call);
+ config_.rtp.remote_ssrc = remote_ssrc;
+ config_.rtp.local_ssrc = local_ssrc;
+ config_.rtp.transport_cc = use_transport_cc;
+ config_.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0;
+ config_.rtp.extensions = extensions;
+ config_.rtcp_send_transport = rtcp_send_transport;
+ config_.voe_channel_id = ch;
+ config_.sync_group = sync_group;
+ config_.decoder_factory = decoder_factory;
+ config_.decoder_map = decoder_map;
+ RecreateAudioReceiveStream();
+ }
+
+ ~WebRtcAudioReceiveStream() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ call_->DestroyAudioReceiveStream(stream_);
+ }
+
+ // Recreates the stream with a new local SSRC.
+ void RecreateAudioReceiveStream(uint32_t local_ssrc) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.rtp.local_ssrc = local_ssrc;
+ RecreateAudioReceiveStream();
+ }
+
+ // Recreates the stream with new transport-cc / NACK settings.
+ void RecreateAudioReceiveStream(bool use_transport_cc, bool use_nack) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.rtp.transport_cc = use_transport_cc;
+ config_.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0;
+ RecreateAudioReceiveStream();
+ }
+
+ // Recreates the stream with a new set of RTP header extensions.
+ void RecreateAudioReceiveStream(
+ const std::vector<webrtc::RtpExtension>& extensions) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.rtp.extensions = extensions;
+ RecreateAudioReceiveStream();
+ }
+
+ // Set a new payload type -> decoder map.
+ void RecreateAudioReceiveStream(
+ const std::map<int, webrtc::SdpAudioFormat>& decoder_map) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ config_.decoder_map = decoder_map;
+ RecreateAudioReceiveStream();
+ }
+
+ // Recreates the stream only if the A/V sync group actually changed.
+ void MaybeRecreateAudioReceiveStream(const std::string& sync_group) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (config_.sync_group != sync_group) {
+ config_.sync_group = sync_group;
+ RecreateAudioReceiveStream();
+ }
+ }
+
+ webrtc::AudioReceiveStream::Stats GetStats() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetStats();
+ }
+
+ int GetOutputLevel() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetOutputLevel();
+ }
+
+ // Accessor to the VoE channel ID.
+ int channel() const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ return config_.voe_channel_id;
+ }
+
+ void SetRawAudioSink(std::unique_ptr<webrtc::AudioSinkInterface> sink) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ stream_->SetSink(std::move(sink));
+ }
+
+ void SetOutputVolume(double volume) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ stream_->SetGain(volume);
+ }
+
+ // Starts or stops playout; the flag is remembered so recreation can
+ // restore it.
+ void SetPlayout(bool playout) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ if (playout) {
+ RTC_LOG(LS_INFO) << "Starting playout for channel #" << channel();
+ stream_->Start();
+ } else {
+ RTC_LOG(LS_INFO) << "Stopping playout for channel #" << channel();
+ stream_->Stop();
+ }
+ playout_ = playout;
+ }
+
+ std::vector<webrtc::RtpSource> GetSources() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(stream_);
+ return stream_->GetSources();
+ }
+
+ private:
+ // Destroys the current stream (if any), creates a fresh one from config_
+ // and reapplies the remembered playout state.
+ void RecreateAudioReceiveStream() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (stream_) {
+ call_->DestroyAudioReceiveStream(stream_);
+ }
+ stream_ = call_->CreateAudioReceiveStream(config_);
+ RTC_CHECK(stream_);
+ SetPlayout(playout_);
+ }
+
+ rtc::ThreadChecker worker_thread_checker_;
+ webrtc::Call* call_ = nullptr;
+ webrtc::AudioReceiveStream::Config config_;
+ // The stream is owned by WebRtcAudioReceiveStream and may be reallocated if
+ // configuration changes.
+ webrtc::AudioReceiveStream* stream_ = nullptr;
+ bool playout_ = false;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioReceiveStream);
+};
+
+// Constructs the channel, registers it with the engine and applies the
+// initial audio options. |engine| and |call| must outlive this channel.
+WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine* engine,
+ const MediaConfig& config,
+ const AudioOptions& options,
+ webrtc::Call* call)
+ : VoiceMediaChannel(config), engine_(engine), call_(call) {
+ RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel";
+ RTC_DCHECK(call);
+ engine->RegisterChannel(this);
+ SetOptions(options);
+}
+
+// Tears down all send/receive streams and unregisters from the engine.
+WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel";
+ // TODO(solenberg): Should be able to delete the streams directly, without
+ // going through RemoveSendStream()/RemoveRecvStream(), once stream objects
+ // handle all (de)configuration.
+ while (!send_streams_.empty()) {
+ RemoveSendStream(send_streams_.begin()->first);
+ }
+ while (!recv_streams_.empty()) {
+ RemoveRecvStream(recv_streams_.begin()->first);
+ }
+ engine()->UnregisterChannel(this);
+}
+
+// Returns the DSCP value used for outgoing audio packets (constant for
+// this channel type).
+rtc::DiffServCodePoint WebRtcVoiceMediaChannel::PreferredDscp() const {
+ return kAudioDscpValue;
+}
+
+// Applies negotiated send parameters: codecs, RTP header extensions, max
+// bandwidth and audio options — in that order, failing fast on the first
+// invalid piece. Extensions are only pushed to streams when they changed.
+bool WebRtcVoiceMediaChannel::SetSendParameters(
+ const AudioSendParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetSendParameters");
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendParameters: "
+ << params.ToString();
+ // TODO(pthatcher): Refactor this to be more clean now that we have
+ // all the information at once.
+
+ if (!SetSendCodecs(params.codecs)) {
+ return false;
+ }
+
+ if (!ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions =
+ FilterRtpExtensions(params.extensions,
+ webrtc::RtpExtension::IsSupportedForAudio, true);
+ if (send_rtp_extensions_ != filtered_extensions) {
+ send_rtp_extensions_.swap(filtered_extensions);
+ for (auto& it : send_streams_) {
+ it.second->SetRtpExtensions(send_rtp_extensions_);
+ }
+ }
+
+ if (!SetMaxSendBitrate(params.max_bandwidth_bps)) {
+ return false;
+ }
+ return SetOptions(params.options);
+}
+
+// Applies negotiated receive parameters: codecs first, then RTP header
+// extensions. Changed extensions force every receive stream to be
+// recreated (receive streams cannot be reconfigured in place).
+bool WebRtcVoiceMediaChannel::SetRecvParameters(
+ const AudioRecvParameters& params) {
+ TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetRecvParameters");
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetRecvParameters: "
+ << params.ToString();
+ // TODO(pthatcher): Refactor this to be more clean now that we have
+ // all the information at once.
+
+ if (!SetRecvCodecs(params.codecs)) {
+ return false;
+ }
+
+ if (!ValidateRtpExtensions(params.extensions)) {
+ return false;
+ }
+ std::vector<webrtc::RtpExtension> filtered_extensions =
+ FilterRtpExtensions(params.extensions,
+ webrtc::RtpExtension::IsSupportedForAudio, false);
+ if (recv_rtp_extensions_ != filtered_extensions) {
+ recv_rtp_extensions_.swap(filtered_extensions);
+ for (auto& it : recv_streams_) {
+ it.second->RecreateAudioReceiveStream(recv_rtp_extensions_);
+ }
+ }
+ return true;
+}
+
+// Returns the RTP send parameters for the stream with |ssrc|: the
+// stream-specific encoding parameters plus the channel-wide send codec
+// list. Returns empty parameters (and logs) for an unknown SSRC.
+webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters(
+ uint32_t ssrc) const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+
+ webrtc::RtpParameters rtp_params = it->second->rtp_parameters();
+ // Need to add the common list of codecs to the send stream-specific
+ // RTP parameters.
+ for (const AudioCodec& codec : send_codecs_) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+// Applies per-sender RTP parameters for |ssrc|. Codec changes are not
+// supported through this path (the codec list must match what
+// GetRtpSendParameters() returns); the remaining parameters are forwarded
+// to the send stream.
+bool WebRtcVoiceMediaChannel::SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ RTC_LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ // TODO(deadbeef): Handle setting parameters with a list of codecs in a
+ // different order (which should change the send codec).
+ webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
+ if (current_parameters.codecs != parameters.codecs) {
+ RTC_LOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
+ << "is not currently supported.";
+ return false;
+ }
+
+ // TODO(minyue): The following legacy actions go into
+ // |WebRtcAudioSendStream::SetRtpParameters()| which is called at the end,
+ // though there are two differences:
+ // 1. |WebRtcVoiceMediaChannel::SetChannelSendParameters()| only calls
+ // |SetSendCodec| while |WebRtcAudioSendStream::SetRtpParameters()| calls
+ // |SetSendCodecs|. The outcome should be the same.
+ // 2. AudioSendStream can be recreated.
+
+ // Codecs are handled at the WebRtcVoiceMediaChannel level.
+ webrtc::RtpParameters reduced_params = parameters;
+ reduced_params.codecs.clear();
+ return it->second->SetRtpParameters(reduced_params);
+}
+
+// Returns the RTP receive parameters for |ssrc|, or for the default
+// (unsignaled) receive stream when |ssrc| is 0. The result carries one
+// encoding plus the channel-wide receive codec list; empty parameters are
+// returned (with a warning) when the requested stream doesn't exist.
+webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ webrtc::RtpParameters rtp_params;
+ // SSRC of 0 represents the default receive stream.
+ if (ssrc == 0) {
+ if (!default_sink_) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to get RTP parameters for the default, "
+ "unsignaled audio receive stream, but not yet "
+ "configured to receive such a stream.";
+ return rtp_params;
+ }
+ rtp_params.encodings.emplace_back();
+ } else {
+ auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to get RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+ rtp_params.encodings.emplace_back();
+ // TODO(deadbeef): Return stream-specific parameters.
+ rtp_params.encodings[0].ssrc = ssrc;
+ }
+
+ for (const AudioCodec& codec : recv_codecs_) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+// "Sets" RTP receive parameters for |ssrc| (0 = default unsignaled
+// stream). Changing receive parameters is unsupported, so this only
+// succeeds when |parameters| equals the current ones; otherwise it logs
+// and returns false.
+bool WebRtcVoiceMediaChannel::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ // SSRC of 0 represents the default receive stream.
+ if (ssrc == 0) {
+ if (!default_sink_) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to set RTP parameters for the default, "
+ "unsignaled audio receive stream, but not yet "
+ "configured to receive such a stream.";
+ return false;
+ }
+ } else {
+ auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempting to set RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+ }
+
+ webrtc::RtpParameters current_parameters = GetRtpReceiveParameters(ssrc);
+ if (current_parameters != parameters) {
+ RTC_LOG(LS_ERROR) << "Changing the RTP receive parameters is currently "
+ << "unsupported.";
+ return false;
+ }
+ return true;
+}
+
+// Merges |options| into the channel's accumulated options, applies the
+// merged set to the engine, and propagates the resulting audio network
+// adaptor config to all send streams. Returns false if the engine rejects
+// the options.
+bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_LOG(LS_INFO) << "Setting voice channel options: " << options.ToString();
+
+ // We retain all of the existing options, and apply the given ones
+ // on top. This means there is no way to "clear" options such that
+ // they go back to the engine default.
+ options_.SetAll(options);
+ if (!engine()->ApplyOptions(options_)) {
+ RTC_LOG(LS_WARNING)
+ << "Failed to apply engine options during channel SetOptions.";
+ return false;
+ }
+
+ rtc::Optional<std::string> audio_network_adaptor_config =
+ GetAudioNetworkAdaptorConfig(options_);
+ for (auto& it : send_streams_) {
+ it.second->SetAudioNetworkAdaptorConfig(audio_network_adaptor_config);
+ }
+
+ RTC_LOG(LS_INFO) << "Set voice channel options. Current options: "
+ << options_.ToString();
+ return true;
+}
+
+// Configures the payload type -> decoder mapping for incoming audio.
+// Validates that payload types are unique, that every non-CN/non-DTMF
+// codec is supported by the decoder factory, and that no already-mapped
+// payload type is reassigned to a different format. When the mapping
+// actually changes, playout is paused, all receive streams are recreated
+// with the new decoder map, and playout is resumed if it was desired.
+bool WebRtcVoiceMediaChannel::SetRecvCodecs(
+ const std::vector<AudioCodec>& codecs) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+
+ // Set the payload types to be used for incoming media.
+ RTC_LOG(LS_INFO) << "Setting receive voice codecs.";
+
+ if (!VerifyUniquePayloadTypes(codecs)) {
+ RTC_LOG(LS_ERROR) << "Codec payload types overlap.";
+ return false;
+ }
+
+ // Create a payload type -> SdpAudioFormat map with all the decoders. Fail
+ // unless the factory claims to support all decoders.
+ std::map<int, webrtc::SdpAudioFormat> decoder_map;
+ for (const AudioCodec& codec : codecs) {
+ // Log a warning if a codec's payload type is changing. This used to be
+ // treated as an error. It's abnormal, but not really illegal.
+ AudioCodec old_codec;
+ if (FindCodec(recv_codecs_, codec, &old_codec) &&
+ old_codec.id != codec.id) {
+ RTC_LOG(LS_WARNING) << codec.name << " mapped to a second payload type ("
+ << codec.id << ", was already mapped to "
+ << old_codec.id << ")";
+ }
+ auto format = AudioCodecToSdpAudioFormat(codec);
+ if (!IsCodec(codec, "cn") && !IsCodec(codec, "telephone-event") &&
+ !engine()->decoder_factory_->IsSupportedDecoder(format)) {
+ RTC_LOG(LS_ERROR) << "Unsupported codec: " << format;
+ return false;
+ }
+ // We allow adding new codecs but don't allow changing the payload type of
+ // codecs that are already configured since we might already be receiving
+ // packets with that payload type. See RFC3264, Section 8.3.2.
+ // TODO(deadbeef): Also need to check for clashes with previously mapped
+ // payload types, and not just currently mapped ones. For example, this
+ // should be illegal:
+ // 1. {100: opus/48000/2, 101: ISAC/16000}
+ // 2. {100: opus/48000/2}
+ // 3. {100: opus/48000/2, 101: ISAC/32000}
+ // Though this check really should happen at a higher level, since this
+ // conflict could happen between audio and video codecs.
+ auto existing = decoder_map_.find(codec.id);
+ if (existing != decoder_map_.end() && !existing->second.Matches(format)) {
+ RTC_LOG(LS_ERROR) << "Attempting to use payload type " << codec.id
+ << " for " << codec.name
+ << ", but it is already used for "
+ << existing->second.name;
+ return false;
+ }
+ decoder_map.insert({codec.id, std::move(format)});
+ }
+
+ if (decoder_map == decoder_map_) {
+ // There's nothing new to configure.
+ return true;
+ }
+
+ if (playout_) {
+ // Receive codecs can not be changed while playing. So we temporarily
+ // pause playout.
+ ChangePlayout(false);
+ }
+
+ decoder_map_ = std::move(decoder_map);
+ for (auto& kv : recv_streams_) {
+ kv.second->RecreateAudioReceiveStream(decoder_map_);
+ }
+ recv_codecs_ = codecs;
+
+ if (desired_playout_ && !playout_) {
+ ChangePlayout(desired_playout_);
+ }
+ return true;
+}
+
+// Utility function called from SetSendParameters() to extract current send
+// codec settings from the given list of codecs (originally from SDP). Both send
+// and receive streams may be reconfigured based on the new settings.
+bool WebRtcVoiceMediaChannel::SetSendCodecs(
+ const std::vector<AudioCodec>& codecs) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ dtmf_payload_type_ = rtc::nullopt;
+ dtmf_payload_freq_ = -1;
+
+ // Validate supplied codecs list.
+ for (const AudioCodec& codec : codecs) {
+ // TODO(solenberg): Validate more aspects of input - that payload types
+ // don't overlap, remove redundant/unsupported codecs etc -
+ // the same way it is done for RtpHeaderExtensions.
+ if (codec.id < kMinPayloadType || codec.id > kMaxPayloadType) {
+ RTC_LOG(LS_WARNING) << "Codec payload type out of range: "
+ << ToString(codec);
+ return false;
+ }
+ }
+
+ // Find PT of telephone-event codec with lowest clockrate, as a fallback, in
+ // case we don't have a DTMF codec with a rate matching the send codec's, or
+ // if this function returns early.
+ std::vector<AudioCodec> dtmf_codecs;
+ for (const AudioCodec& codec : codecs) {
+ if (IsCodec(codec, kDtmfCodecName)) {
+ dtmf_codecs.push_back(codec);
+ if (!dtmf_payload_type_ || codec.clockrate < dtmf_payload_freq_) {
+ dtmf_payload_type_ = codec.id;
+ dtmf_payload_freq_ = codec.clockrate;
+ }
+ }
+ }
+
+ // Scan through the list to figure out the codec to use for sending.
+ rtc::Optional<webrtc::AudioSendStream::Config::SendCodecSpec> send_codec_spec;
+ webrtc::Call::Config::BitrateConfig bitrate_config;
+ rtc::Optional<webrtc::AudioCodecInfo> voice_codec_info;
+ for (const AudioCodec& voice_codec : codecs) {
+ if (!(IsCodec(voice_codec, kCnCodecName) ||
+ IsCodec(voice_codec, kDtmfCodecName) ||
+ IsCodec(voice_codec, kRedCodecName))) {
+ webrtc::SdpAudioFormat format(voice_codec.name, voice_codec.clockrate,
+ voice_codec.channels, voice_codec.params);
+
+ voice_codec_info = engine()->encoder_factory_->QueryAudioEncoder(format);
+ if (!voice_codec_info) {
+ RTC_LOG(LS_WARNING) << "Unknown codec " << ToString(voice_codec);
+ continue;
+ }
+
+ send_codec_spec = webrtc::AudioSendStream::Config::SendCodecSpec(
+ voice_codec.id, format);
+ if (voice_codec.bitrate > 0) {
+ send_codec_spec->target_bitrate_bps = voice_codec.bitrate;
+ }
+ send_codec_spec->transport_cc_enabled = HasTransportCc(voice_codec);
+ send_codec_spec->nack_enabled = HasNack(voice_codec);
+ bitrate_config = GetBitrateConfigForCodec(voice_codec);
+ break;
+ }
+ }
+
+ if (!send_codec_spec) {
+ return false;
+ }
+
+ RTC_DCHECK(voice_codec_info);
+ if (voice_codec_info->allow_comfort_noise) {
+ // Loop through the codecs list again to find the CN codec.
+ // TODO(solenberg): Break out into a separate function?
+ for (const AudioCodec& cn_codec : codecs) {
+ if (IsCodec(cn_codec, kCnCodecName) &&
+ cn_codec.clockrate == send_codec_spec->format.clockrate_hz) {
+ switch (cn_codec.clockrate) {
+ case 8000:
+ case 16000:
+ case 32000:
+ send_codec_spec->cng_payload_type = cn_codec.id;
+ break;
+ default:
+ RTC_LOG(LS_WARNING)
+ << "CN frequency " << cn_codec.clockrate << " not supported.";
+ break;
+ }
+ break;
+ }
+ }
+
+ // Find the telephone-event PT exactly matching the preferred send codec.
+ for (const AudioCodec& dtmf_codec : dtmf_codecs) {
+ if (dtmf_codec.clockrate == send_codec_spec->format.clockrate_hz) {
+ dtmf_payload_type_ = dtmf_codec.id;
+ dtmf_payload_freq_ = dtmf_codec.clockrate;
+ break;
+ }
+ }
+ }
+
+ if (send_codec_spec_ != send_codec_spec) {
+ send_codec_spec_ = std::move(send_codec_spec);
+ // Apply new settings to all streams.
+ for (const auto& kv : send_streams_) {
+ kv.second->SetSendCodecSpec(*send_codec_spec_);
+ }
+ } else {
+ // If the codec isn't changing, set the start bitrate to -1 which means
+ // "unchanged" so that BWE isn't affected.
+ bitrate_config.start_bitrate_bps = -1;
+ }
+ call_->SetBitrateConfig(bitrate_config);
+
+ // Check if the transport cc feedback or NACK status has changed on the
+ // preferred send codec, and in that case reconfigure all receive streams.
+ if (recv_transport_cc_enabled_ != send_codec_spec_->transport_cc_enabled ||
+ recv_nack_enabled_ != send_codec_spec_->nack_enabled) {
+ RTC_LOG(LS_INFO) << "Recreate all the receive streams because the send "
+ "codec has changed.";
+ recv_transport_cc_enabled_ = send_codec_spec_->transport_cc_enabled;
+ recv_nack_enabled_ = send_codec_spec_->nack_enabled;
+ for (auto& kv : recv_streams_) {
+ kv.second->RecreateAudioReceiveStream(recv_transport_cc_enabled_,
+ recv_nack_enabled_);
+ }
+ }
+
+ send_codecs_ = codecs;
+ return true;
+}
+
+// Records the requested playout state and immediately applies it to all
+// receive streams via ChangePlayout().
+void WebRtcVoiceMediaChannel::SetPlayout(bool playout) {
+  desired_playout_ = playout;
+  ChangePlayout(desired_playout_);
+}
+
+// Starts or stops audio playout on every receive stream. No-op when the
+// requested state matches the current one.
+void WebRtcVoiceMediaChannel::ChangePlayout(bool playout) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::ChangePlayout");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  if (playout == playout_) {
+    return;  // Already in the requested state.
+  }
+  for (const auto& kv : recv_streams_) {
+    kv.second->SetPlayout(playout);
+  }
+  playout_ = playout;
+}
+
+// Starts or stops audio transmission on all send streams. No-op when the
+// state is unchanged. On enable, channel options are (re)applied and the
+// audio device module is prepared for recording.
+void WebRtcVoiceMediaChannel::SetSend(bool send) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetSend");
+  if (send_ == send) {
+    return;
+  }
+
+  // Apply channel specific options, and initialize the ADM for recording (this
+  // may take time on some platforms, e.g. Android).
+  if (send) {
+    engine()->ApplyOptions(options_);
+
+    // InitRecording() may return an error if the ADM is already recording.
+    if (!engine()->adm()->RecordingIsInitialized() &&
+        !engine()->adm()->Recording()) {
+      if (engine()->adm()->InitRecording() != 0) {
+        // Best-effort: failure to init recording is logged but does not block
+        // the state change.
+        RTC_LOG(LS_WARNING) << "Failed to initialize recording";
+      }
+    }
+  }
+
+  // Change the settings on each send channel.
+  for (auto& kv : send_streams_) {
+    kv.second->SetSend(send);
+  }
+
+  send_ = send;
+}
+
+// Combined helper to configure local audio for |ssrc|: attaches or clears the
+// capture |source|, mutes/unmutes the stream, and (when enabling) applies
+// |options|. Returns false as soon as any step fails.
+bool WebRtcVoiceMediaChannel::SetAudioSend(uint32_t ssrc,
+                                           bool enable,
+                                           const AudioOptions* options,
+                                           AudioSource* source) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  // TODO(solenberg): The state change should be fully rolled back if any one of
+  // these calls fail.
+  if (!SetLocalSource(ssrc, source)) {
+    return false;
+  }
+  if (!MuteStream(ssrc, !enable)) {
+    return false;
+  }
+  if (enable && options) {
+    return SetOptions(*options);
+  }
+  return true;
+}
+
+// Allocates a raw VoiceEngine channel through the engine; returns the channel
+// id, or -1 on failure (logged as a warning).
+int WebRtcVoiceMediaChannel::CreateVoEChannel() {
+  const int channel_id = engine()->CreateVoEChannel();
+  if (channel_id != -1) {
+    return channel_id;
+  }
+  RTC_LOG(LS_WARNING) << "CreateVoEChannel() failed.";
+  return -1;
+}
+
+// Releases a VoiceEngine channel previously obtained via CreateVoEChannel().
+// Returns false (with a warning) if VoE reports a failure.
+bool WebRtcVoiceMediaChannel::DeleteVoEChannel(int channel) {
+  const bool deleted = engine()->voe()->base()->DeleteChannel(channel) != -1;
+  if (!deleted) {
+    RTC_LOG(LS_WARNING) << "DeleteChannel(" << channel << ") failed.";
+  }
+  return deleted;
+}
+
+// Creates a VoE channel and a WebRtcAudioSendStream for |sp|'s primary SSRC.
+// The first send stream's SSRC also becomes the local SSRC used in receiver
+// reports, which requires recreating all existing receive streams.
+bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::AddSendStream");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "AddSendStream: " << sp.ToString();
+
+  uint32_t ssrc = sp.first_ssrc();
+  RTC_DCHECK(0 != ssrc);
+
+  if (GetSendChannelId(ssrc) != -1) {
+    RTC_LOG(LS_ERROR) << "Stream already exists with ssrc " << ssrc;
+    return false;
+  }
+
+  // Create a new channel for sending audio data.
+  int channel = CreateVoEChannel();
+  if (channel == -1) {
+    return false;
+  }
+
+  // Save the channel to send_streams_, so that RemoveSendStream() can still
+  // delete the channel in case failure happens below.
+  webrtc::AudioTransport* audio_transport =
+      engine()->voe()->base()->audio_transport();
+
+  rtc::Optional<std::string> audio_network_adaptor_config =
+      GetAudioNetworkAdaptorConfig(options_);
+  WebRtcAudioSendStream* stream = new WebRtcAudioSendStream(
+      channel, audio_transport, ssrc, sp.cname, sp.id, send_codec_spec_,
+      send_rtp_extensions_, max_send_bitrate_bps_, audio_network_adaptor_config,
+      call_, this, engine()->encoder_factory_);
+  send_streams_.insert(std::make_pair(ssrc, stream));
+
+  // At this point the stream's local SSRC has been updated. If it is the first
+  // send stream, make sure that all the receive streams are updated with the
+  // same SSRC in order to send receiver reports.
+  if (send_streams_.size() == 1) {
+    receiver_reports_ssrc_ = ssrc;
+    for (const auto& kv : recv_streams_) {
+      // TODO(solenberg): Allow applications to set the RTCP SSRC of receive
+      // streams instead, so we can avoid recreating the streams here.
+      kv.second->RecreateAudioReceiveStream(ssrc);
+    }
+  }
+
+  // Pick up the channel-wide sending state so the new stream matches it.
+  send_streams_[ssrc]->SetSend(send_);
+  return true;
+}
+
+// Stops and destroys the send stream for |ssrc| along with its VoE channel.
+// Disables sending entirely once the last send stream is removed.
+bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::RemoveSendStream");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "RemoveSendStream: " << ssrc;
+
+  auto it = send_streams_.find(ssrc);
+  if (it == send_streams_.end()) {
+    RTC_LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
+                        << " which doesn't exist.";
+    return false;
+  }
+
+  it->second->SetSend(false);
+
+  // TODO(solenberg): If we're removing the receiver_reports_ssrc_ stream, find
+  // the first active send stream and use that instead, reassociating receive
+  // streams.
+
+  // Clean up and delete the send stream+channel.
+  int channel = it->second->channel();
+  RTC_LOG(LS_INFO) << "Removing audio send stream " << ssrc
+                   << " with VoiceEngine channel #" << channel << ".";
+  delete it->second;
+  send_streams_.erase(it);
+  if (!DeleteVoEChannel(channel)) {
+    return false;
+  }
+  if (send_streams_.empty()) {
+    SetSend(false);
+  }
+  return true;
+}
+
+// Creates a receive stream for |sp|'s primary SSRC, or promotes an existing
+// unsignaled stream to a signaled one (possibly recreating it if the sync
+// label changed).
+bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::AddRecvStream");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "AddRecvStream: " << sp.ToString();
+
+  if (!ValidateStreamParams(sp)) {
+    return false;
+  }
+
+  const uint32_t ssrc = sp.first_ssrc();
+  if (ssrc == 0) {
+    RTC_LOG(LS_WARNING) << "AddRecvStream with ssrc==0 is not supported.";
+    return false;
+  }
+
+  // If this stream was previously received unsignaled, we promote it, possibly
+  // recreating the AudioReceiveStream, if sync_label has changed.
+  if (MaybeDeregisterUnsignaledRecvStream(ssrc)) {
+    recv_streams_[ssrc]->MaybeRecreateAudioReceiveStream(sp.sync_label);
+    return true;
+  }
+
+  if (GetReceiveChannelId(ssrc) != -1) {
+    RTC_LOG(LS_ERROR) << "Stream already exists with ssrc " << ssrc;
+    return false;
+  }
+
+  // Create a new channel for receiving audio data.
+  const int channel = CreateVoEChannel();
+  if (channel == -1) {
+    return false;
+  }
+
+  recv_streams_.insert(std::make_pair(
+      ssrc,
+      new WebRtcAudioReceiveStream(
+          channel, ssrc, receiver_reports_ssrc_, recv_transport_cc_enabled_,
+          recv_nack_enabled_, sp.sync_label, recv_rtp_extensions_, call_, this,
+          engine()->decoder_factory_, decoder_map_)));
+  // New streams inherit the channel-wide playout state.
+  recv_streams_[ssrc]->SetPlayout(playout_);
+
+  return true;
+}
+
+// Destroys the receive stream for |ssrc| and its VoE channel; also removes it
+// from the unsignaled-stream bookkeeping if it was created unsignaled.
+bool WebRtcVoiceMediaChannel::RemoveRecvStream(uint32_t ssrc) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::RemoveRecvStream");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "RemoveRecvStream: " << ssrc;
+
+  const auto it = recv_streams_.find(ssrc);
+  if (it == recv_streams_.end()) {
+    RTC_LOG(LS_WARNING) << "Try to remove stream with ssrc " << ssrc
+                        << " which doesn't exist.";
+    return false;
+  }
+
+  MaybeDeregisterUnsignaledRecvStream(ssrc);
+
+  const int channel = it->second->channel();
+
+  // Clean up and delete the receive stream+channel.
+  RTC_LOG(LS_INFO) << "Removing audio receive stream " << ssrc
+                   << " with VoiceEngine channel #" << channel << ".";
+  // Detach any audio sink before destruction.
+  it->second->SetRawAudioSink(nullptr);
+  delete it->second;
+  recv_streams_.erase(it);
+  return DeleteVoEChannel(channel);
+}
+
+// Attaches (or, when |source| is null, detaches) the local audio capture
+// source of the send stream identified by |ssrc|. Setting a non-null source
+// on a missing stream is an error; clearing a missing stream is a no-op.
+bool WebRtcVoiceMediaChannel::SetLocalSource(uint32_t ssrc,
+                                             AudioSource* source) {
+  const auto it = send_streams_.find(ssrc);
+  if (it != send_streams_.end()) {
+    if (source) {
+      it->second->SetSource(source);
+    } else {
+      it->second->ClearSource();
+    }
+    return true;
+  }
+
+  if (source) {
+    // Return an error if trying to set a valid source with an invalid ssrc.
+    RTC_LOG(LS_ERROR) << "SetLocalSource failed with ssrc " << ssrc;
+    return false;
+  }
+
+  // The channel likely has gone away, do nothing.
+  return true;
+}
+
+// TODO(solenberg): Remove, once AudioMonitor is gone.
+// Collects the (ssrc, output level) of every receive stream whose current
+// output level is above zero.
+bool WebRtcVoiceMediaChannel::GetActiveStreams(
+    AudioInfo::StreamList* actives) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  actives->clear();
+  for (const auto& kv : recv_streams_) {
+    const int output_level = kv.second->GetOutputLevel();
+    if (output_level > 0) {
+      actives->push_back(std::make_pair(kv.first, output_level));
+    }
+  }
+  return true;
+}
+
+// TODO(solenberg): Remove, once AudioMonitor is gone.
+// Returns the highest current output level across all receive streams
+// (0 when there are none).
+int WebRtcVoiceMediaChannel::GetOutputLevel() {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  int max_level = 0;
+  for (const auto& kv : recv_streams_) {
+    const int level = kv.second->GetOutputLevel();
+    if (level > max_level) {
+      max_level = level;
+    }
+  }
+  return max_level;
+}
+
+// Sets the playout volume for the receive stream with |ssrc|. SSRC==0 applies
+// the volume to all current unsignaled streams and remembers it as the
+// default for future unsignaled streams.
+bool WebRtcVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  std::vector<uint32_t> ssrcs(1, ssrc);
+  // SSRC of 0 represents the default receive stream.
+  if (ssrc == 0) {
+    default_recv_volume_ = volume;
+    ssrcs = unsignaled_recv_ssrcs_;
+  }
+  // NOTE(review): the loop variable below shadows the |ssrc| parameter.
+  for (uint32_t ssrc : ssrcs) {
+    const auto it = recv_streams_.find(ssrc);
+    if (it == recv_streams_.end()) {
+      RTC_LOG(LS_WARNING) << "SetOutputVolume: no recv stream " << ssrc;
+      return false;
+    }
+    it->second->SetOutputVolume(volume);
+    RTC_LOG(LS_INFO) << "SetOutputVolume() to " << volume
+                     << " for recv stream with ssrc " << ssrc;
+  }
+  return true;
+}
+
+// DTMF can only be inserted once a telephone-event payload type has been
+// negotiated (see SetSendCodecs()).
+bool WebRtcVoiceMediaChannel::CanInsertDtmf() {
+  return static_cast<bool>(dtmf_payload_type_);
+}
+
+// Sends a telephone-event (DTMF) on the send stream for |ssrc|, or on the
+// first send stream when ssrc==0. Requires a negotiated DTMF payload type and
+// an event code within [kMinTelephoneEventCode, kMaxTelephoneEventCode].
+bool WebRtcVoiceMediaChannel::InsertDtmf(uint32_t ssrc, int event,
+                                         int duration) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::InsertDtmf";
+  if (!dtmf_payload_type_) {
+    return false;
+  }
+
+  // Figure out which WebRtcAudioSendStream to send the event on.
+  auto it = ssrc != 0 ? send_streams_.find(ssrc) : send_streams_.begin();
+  if (it == send_streams_.end()) {
+    RTC_LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use.";
+    return false;
+  }
+  if (event < kMinTelephoneEventCode ||
+      event > kMaxTelephoneEventCode) {
+    RTC_LOG(LS_WARNING) << "DTMF event code " << event << " out of range.";
+    return false;
+  }
+  // dtmf_payload_freq_ is always set together with dtmf_payload_type_.
+  RTC_DCHECK_NE(-1, dtmf_payload_freq_);
+  return it->second->SendTelephoneEvent(*dtmf_payload_type_, dtmf_payload_freq_,
+                                        event, duration);
+}
+
+// Delivers an incoming RTP packet to the Call. If the SSRC is unknown, an
+// unsignaled receive stream is created on the fly (capped at
+// kMaxUnsignaledRecvStreams, evicting the oldest) and the packet is delivered
+// again so it is not lost.
+void WebRtcVoiceMediaChannel::OnPacketReceived(
+    rtc::CopyOnWriteBuffer* packet, const rtc::PacketTime& packet_time) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+
+  const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
+                                              packet_time.not_before);
+  webrtc::PacketReceiver::DeliveryStatus delivery_result =
+      call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+                                       packet->cdata(), packet->size(),
+                                       webrtc_packet_time);
+  if (delivery_result != webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC) {
+    return;
+  }
+
+  // Create an unsignaled receive stream for this previously not received ssrc.
+  // If there already is N unsignaled receive streams, delete the oldest.
+  // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208
+  uint32_t ssrc = 0;
+  if (!GetRtpSsrc(packet->cdata(), packet->size(), &ssrc)) {
+    return;
+  }
+  // An unsignaled stream for this SSRC cannot already exist, or delivery
+  // above would not have reported an unknown SSRC.
+  RTC_DCHECK(std::find(unsignaled_recv_ssrcs_.begin(),
+      unsignaled_recv_ssrcs_.end(), ssrc) == unsignaled_recv_ssrcs_.end());
+
+  // Add new stream.
+  StreamParams sp;
+  sp.ssrcs.push_back(ssrc);
+  RTC_LOG(LS_INFO) << "Creating unsignaled receive stream for SSRC=" << ssrc;
+  if (!AddRecvStream(sp)) {
+    RTC_LOG(LS_WARNING) << "Could not create unsignaled receive stream.";
+    return;
+  }
+  unsignaled_recv_ssrcs_.push_back(ssrc);
+  RTC_HISTOGRAM_COUNTS_LINEAR(
+      "WebRTC.Audio.NumOfUnsignaledStreams", unsignaled_recv_ssrcs_.size(), 1,
+      100, 101);
+
+  // Remove oldest unsignaled stream, if we have too many.
+  if (unsignaled_recv_ssrcs_.size() > kMaxUnsignaledRecvStreams) {
+    uint32_t remove_ssrc = unsignaled_recv_ssrcs_.front();
+    RTC_LOG(LS_INFO) << "Removing unsignaled receive stream with SSRC="
+                     << remove_ssrc;
+    RemoveRecvStream(remove_ssrc);
+  }
+  RTC_DCHECK_GE(kMaxUnsignaledRecvStreams, unsignaled_recv_ssrcs_.size());
+
+  // Apply the remembered default volume to the new stream.
+  SetOutputVolume(ssrc, default_recv_volume_);
+
+  // The default sink can only be attached to one stream at a time, so we hook
+  // it up to the *latest* unsignaled stream we've seen, in order to support the
+  // case where the SSRC of one unsignaled stream changes.
+  if (default_sink_) {
+    for (uint32_t drop_ssrc : unsignaled_recv_ssrcs_) {
+      auto it = recv_streams_.find(drop_ssrc);
+      it->second->SetRawAudioSink(nullptr);
+    }
+    std::unique_ptr<webrtc::AudioSinkInterface> proxy_sink(
+        new ProxySink(default_sink_.get()));
+    SetRawAudioSink(ssrc, std::move(proxy_sink));
+  }
+
+  // Re-deliver the packet now that a stream exists for its SSRC.
+  delivery_result = call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+                                                     packet->cdata(),
+                                                     packet->size(),
+                                                     webrtc_packet_time);
+  RTC_DCHECK_NE(webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC, delivery_result);
+}
+
+// Hands an incoming RTCP packet to the Call for processing; unlike RTP, no
+// unsignaled-stream handling is attempted here.
+void WebRtcVoiceMediaChannel::OnRtcpReceived(
+    rtc::CopyOnWriteBuffer* packet, const rtc::PacketTime& packet_time) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+
+  // Forward packet to Call as well.
+  const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
+                                              packet_time.not_before);
+  call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+      packet->cdata(), packet->size(), webrtc_packet_time);
+}
+
+// Notifies the Call of a network route change and of the new per-packet
+// transport overhead for audio.
+void WebRtcVoiceMediaChannel::OnNetworkRouteChanged(
+    const std::string& transport_name,
+    const rtc::NetworkRoute& network_route) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  // TODO(zhihaung): Merge these two callbacks.
+  call_->OnNetworkRouteChanged(transport_name, network_route);
+  call_->OnTransportOverheadChanged(webrtc::MediaType::AUDIO,
+                                    network_route.packet_overhead);
+}
+
+// Mutes or unmutes the send stream for |ssrc|, then tells the APM whether
+// *all* send streams are now muted.
+bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  const auto it = send_streams_.find(ssrc);
+  if (it == send_streams_.end()) {
+    RTC_LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use.";
+    return false;
+  }
+  it->second->SetMuted(muted);
+
+  // TODO(solenberg):
+  // We set the AGC to mute state only when all the channels are muted.
+  // This implementation is not ideal, instead we should signal the AGC when
+  // the mic channel is muted/unmuted. We can't do it today because there
+  // is no good way to know which stream is mapping to the mic channel.
+  bool all_muted = muted;
+  for (const auto& kv : send_streams_) {
+    if (!kv.second->muted()) {
+      all_muted = false;
+      break;
+    }
+  }
+  engine()->apm()->set_output_will_be_muted(all_muted);
+
+  return true;
+}
+
+// Stores the channel-wide maximum send bitrate and applies it to every send
+// stream. Returns false if any stream rejects the new cap.
+bool WebRtcVoiceMediaChannel::SetMaxSendBitrate(int bps) {
+  RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetMaxSendBitrate.";
+  max_send_bitrate_bps_ = bps;
+  bool all_succeeded = true;
+  for (const auto& kv : send_streams_) {
+    all_succeeded &= kv.second->SetMaxSendBitrate(max_send_bitrate_bps_);
+  }
+  return all_succeeded;
+}
+
+// Propagates transport readiness to the Call as an audio network up/down
+// signal.
+void WebRtcVoiceMediaChannel::OnReadyToSend(bool ready) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready.");
+  const webrtc::NetworkState state =
+      ready ? webrtc::kNetworkUp : webrtc::kNetworkDown;
+  call_->SignalChannelNetworkState(webrtc::MediaType::AUDIO, state);
+}
+
+// Fills |info| with per-sender, per-receiver, and codec statistics. Expects
+// the sender/receiver lists in |info| to be empty on entry.
+bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
+  TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::GetStats");
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(info);
+
+  // Get SSRC and stats for each sender.
+  RTC_DCHECK_EQ(info->senders.size(), 0U);
+  for (const auto& stream : send_streams_) {
+    webrtc::AudioSendStream::Stats stats =
+        stream.second->GetStats(recv_streams_.size() > 0);
+    VoiceSenderInfo sinfo;
+    sinfo.add_ssrc(stats.local_ssrc);
+    sinfo.bytes_sent = stats.bytes_sent;
+    sinfo.packets_sent = stats.packets_sent;
+    sinfo.packets_lost = stats.packets_lost;
+    sinfo.fraction_lost = stats.fraction_lost;
+    sinfo.codec_name = stats.codec_name;
+    sinfo.codec_payload_type = stats.codec_payload_type;
+    sinfo.ext_seqnum = stats.ext_seqnum;
+    sinfo.jitter_ms = stats.jitter_ms;
+    sinfo.rtt_ms = stats.rtt_ms;
+    sinfo.audio_level = stats.audio_level;
+    sinfo.total_input_energy = stats.total_input_energy;
+    sinfo.total_input_duration = stats.total_input_duration;
+    // Only report typing noise while actually sending.
+    sinfo.typing_noise_detected = (send_ ? stats.typing_noise_detected : false);
+    sinfo.ana_statistics = stats.ana_statistics;
+    sinfo.apm_statistics = stats.apm_statistics;
+    info->senders.push_back(sinfo);
+  }
+
+  // Get SSRC and stats for each receiver.
+  RTC_DCHECK_EQ(info->receivers.size(), 0U);
+  for (const auto& stream : recv_streams_) {
+    uint32_t ssrc = stream.first;
+    // When SSRCs are unsignaled, there's only one audio MediaStreamTrack, but
+    // multiple RTP streams can be received over time (if the SSRC changes for
+    // whatever reason). We only want the RTCMediaStreamTrackStats to represent
+    // the stats for the most recent stream (the one whose audio is actually
+    // routed to the MediaStreamTrack), so here we ignore any unsignaled SSRCs
+    // except for the most recent one (last in the vector). This is somewhat of
+    // a hack, and means you don't get *any* stats for these inactive streams,
+    // but it's slightly better than the previous behavior, which was "highest
+    // SSRC wins".
+    // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=8158
+    if (!unsignaled_recv_ssrcs_.empty()) {
+      auto end_it = --unsignaled_recv_ssrcs_.end();
+      if (std::find(unsignaled_recv_ssrcs_.begin(), end_it, ssrc) != end_it) {
+        continue;
+      }
+    }
+    webrtc::AudioReceiveStream::Stats stats = stream.second->GetStats();
+    VoiceReceiverInfo rinfo;
+    rinfo.add_ssrc(stats.remote_ssrc);
+    rinfo.bytes_rcvd = stats.bytes_rcvd;
+    rinfo.packets_rcvd = stats.packets_rcvd;
+    rinfo.packets_lost = stats.packets_lost;
+    rinfo.fraction_lost = stats.fraction_lost;
+    rinfo.codec_name = stats.codec_name;
+    rinfo.codec_payload_type = stats.codec_payload_type;
+    rinfo.ext_seqnum = stats.ext_seqnum;
+    rinfo.jitter_ms = stats.jitter_ms;
+    rinfo.jitter_buffer_ms = stats.jitter_buffer_ms;
+    rinfo.jitter_buffer_preferred_ms = stats.jitter_buffer_preferred_ms;
+    rinfo.delay_estimate_ms = stats.delay_estimate_ms;
+    rinfo.audio_level = stats.audio_level;
+    rinfo.total_output_energy = stats.total_output_energy;
+    rinfo.total_samples_received = stats.total_samples_received;
+    rinfo.total_output_duration = stats.total_output_duration;
+    rinfo.concealed_samples = stats.concealed_samples;
+    rinfo.concealment_events = stats.concealment_events;
+    rinfo.jitter_buffer_delay_seconds = stats.jitter_buffer_delay_seconds;
+    rinfo.expand_rate = stats.expand_rate;
+    rinfo.speech_expand_rate = stats.speech_expand_rate;
+    rinfo.secondary_decoded_rate = stats.secondary_decoded_rate;
+    rinfo.secondary_discarded_rate = stats.secondary_discarded_rate;
+    rinfo.accelerate_rate = stats.accelerate_rate;
+    rinfo.preemptive_expand_rate = stats.preemptive_expand_rate;
+    rinfo.decoding_calls_to_silence_generator =
+        stats.decoding_calls_to_silence_generator;
+    rinfo.decoding_calls_to_neteq = stats.decoding_calls_to_neteq;
+    rinfo.decoding_normal = stats.decoding_normal;
+    rinfo.decoding_plc = stats.decoding_plc;
+    rinfo.decoding_cng = stats.decoding_cng;
+    rinfo.decoding_plc_cng = stats.decoding_plc_cng;
+    rinfo.decoding_muted_output = stats.decoding_muted_output;
+    rinfo.capture_start_ntp_time_ms = stats.capture_start_ntp_time_ms;
+    info->receivers.push_back(rinfo);
+  }
+
+  // Get codec info
+  for (const AudioCodec& codec : send_codecs_) {
+    webrtc::RtpCodecParameters codec_params = codec.ToCodecParameters();
+    info->send_codecs.insert(
+        std::make_pair(codec_params.payload_type, std::move(codec_params)));
+  }
+  for (const AudioCodec& codec : recv_codecs_) {
+    webrtc::RtpCodecParameters codec_params = codec.ToCodecParameters();
+    info->receive_codecs.insert(
+        std::make_pair(codec_params.payload_type, std::move(codec_params)));
+  }
+
+  return true;
+}
+
+// Attaches |sink| to the receive stream for |ssrc|. SSRC==0 targets the most
+// recent unsignaled stream (via a proxy) and stores the sink as the default
+// for future unsignaled streams.
+void WebRtcVoiceMediaChannel::SetRawAudioSink(
+    uint32_t ssrc,
+    std::unique_ptr<webrtc::AudioSinkInterface> sink) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::SetRawAudioSink: ssrc:"
+                      << ssrc << " " << (sink ? "(ptr)" : "NULL");
+  if (ssrc == 0) {
+    if (!unsignaled_recv_ssrcs_.empty()) {
+      // Proxy so that the default sink can be swapped without touching the
+      // stream again.
+      std::unique_ptr<webrtc::AudioSinkInterface> proxy_sink(
+          sink ? new ProxySink(sink.get()) : nullptr);
+      SetRawAudioSink(unsignaled_recv_ssrcs_.back(), std::move(proxy_sink));
+    }
+    default_sink_ = std::move(sink);
+    return;
+  }
+  const auto it = recv_streams_.find(ssrc);
+  if (it == recv_streams_.end()) {
+    RTC_LOG(LS_WARNING) << "SetRawAudioSink: no recv stream " << ssrc;
+    return;
+  }
+  it->second->SetRawAudioSink(std::move(sink));
+}
+
+// Returns the RTP sources observed on the receive stream for |ssrc|, or an
+// empty vector (with an error log) if no such stream exists.
+std::vector<webrtc::RtpSource> WebRtcVoiceMediaChannel::GetSources(
+    uint32_t ssrc) const {
+  const auto it = recv_streams_.find(ssrc);
+  if (it != recv_streams_.end()) {
+    return it->second->GetSources();
+  }
+  RTC_LOG(LS_ERROR) << "Attempting to get contributing sources for SSRC:"
+                    << ssrc << " which doesn't exist.";
+  return std::vector<webrtc::RtpSource>();
+}
+
+// Maps a receive SSRC to its VoE channel id; -1 when the SSRC is unknown.
+int WebRtcVoiceMediaChannel::GetReceiveChannelId(uint32_t ssrc) const {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  const auto it = recv_streams_.find(ssrc);
+  return it == recv_streams_.end() ? -1 : it->second->channel();
+}
+
+// Maps a send SSRC to its VoE channel id; -1 when the SSRC is unknown.
+int WebRtcVoiceMediaChannel::GetSendChannelId(uint32_t ssrc) const {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  const auto it = send_streams_.find(ssrc);
+  return it == send_streams_.end() ? -1 : it->second->channel();
+}
+
+// If |ssrc| is currently tracked as an unsignaled stream, stops tracking it
+// (it is now removed or signaled) and returns true; otherwise returns false.
+bool WebRtcVoiceMediaChannel::
+    MaybeDeregisterUnsignaledRecvStream(uint32_t ssrc) {
+  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+  const auto it = std::find(unsignaled_recv_ssrcs_.begin(),
+                            unsignaled_recv_ssrcs_.end(), ssrc);
+  if (it == unsignaled_recv_ssrcs_.end()) {
+    return false;
+  }
+  unsignaled_recv_ssrcs_.erase(it);
+  return true;
+}
+} // namespace cricket
+
+#endif // HAVE_WEBRTC_VOICE
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.h b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.h
new file mode 100644
index 0000000000..8984299638
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine.h
@@ -0,0 +1,304 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_ENGINE_WEBRTCVOICEENGINE_H_
+#define MEDIA_ENGINE_WEBRTCVOICEENGINE_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/rtpreceiverinterface.h"
+#include "call/audio_state.h"
+#include "call/call.h"
+#include "media/base/rtputils.h"
+#include "media/engine/apm_helpers.h"
+#include "media/engine/webrtcvoe.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "pc/channel.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/scoped_ref_ptr.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/thread_checker.h"
+
+namespace webrtc {
+namespace voe {
+class TransmitMixer;
+} // namespace voe
+} // namespace webrtc
+
+namespace cricket {
+
+class AudioDeviceModule;
+class AudioMixer;
+class AudioSource;
+class VoEWrapper;
+class WebRtcVoiceMediaChannel;
+
+// WebRtcVoiceEngine is a class to be used with CompositeMediaEngine.
+// It uses the WebRtc VoiceEngine library for audio handling.
+class WebRtcVoiceEngine final {
+  // Channels reach into the engine's private state (factories, ADM, APM).
+  friend class WebRtcVoiceMediaChannel;
+ public:
+  WebRtcVoiceEngine(
+      webrtc::AudioDeviceModule* adm,
+      const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
+      const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+      rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+      rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing);
+  // Dependency injection for testing.
+  WebRtcVoiceEngine(
+      webrtc::AudioDeviceModule* adm,
+      const rtc::scoped_refptr<webrtc::AudioEncoderFactory>& encoder_factory,
+      const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+      rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
+      rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
+      VoEWrapper* voe_wrapper);
+  ~WebRtcVoiceEngine();
+
+  // Does initialization that needs to occur on the worker thread.
+  void Init();
+
+  rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const;
+  // Creates a new WebRtcVoiceMediaChannel bound to |call|.
+  VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+                                   const MediaConfig& config,
+                                   const AudioOptions& options);
+
+  int GetInputLevel();
+
+  const std::vector<AudioCodec>& send_codecs() const;
+  const std::vector<AudioCodec>& recv_codecs() const;
+  RtpCapabilities GetCapabilities() const;
+
+  // For tracking WebRtc channels. Needed because we have to pause them
+  // all when switching devices.
+  // May only be called by WebRtcVoiceMediaChannel.
+  void RegisterChannel(WebRtcVoiceMediaChannel* channel);
+  void UnregisterChannel(WebRtcVoiceMediaChannel* channel);
+
+  VoEWrapper* voe() { return voe_wrapper_.get(); }
+
+  // Starts AEC dump using an existing file. A maximum file size in bytes can be
+  // specified. When the maximum file size is reached, logging is stopped and
+  // the file is closed. If max_size_bytes is set to <= 0, no limit will be
+  // used.
+  bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes);
+
+  // Stops AEC dump.
+  void StopAecDump();
+
+  const webrtc::AudioProcessing::Config GetApmConfigForTest() const {
+    return apm()->GetConfig();
+  }
+
+ private:
+  // Every option that is "set" will be applied. Every option not "set" will be
+  // ignored. This allows us to selectively turn on and off different options
+  // easily at any time.
+  bool ApplyOptions(const AudioOptions& options);
+
+  void StartAecDump(const std::string& filename);
+  int CreateVoEChannel();
+
+  std::unique_ptr<rtc::TaskQueue> low_priority_worker_queue_;
+
+  webrtc::AudioDeviceModule* adm();
+  webrtc::AudioProcessing* apm() const;
+  webrtc::voe::TransmitMixer* transmit_mixer();
+
+  // Converts audio codec specs from the encoder factory into AudioCodecs.
+  AudioCodecs CollectCodecs(
+      const std::vector<webrtc::AudioCodecSpec>& specs) const;
+
+  rtc::ThreadChecker signal_thread_checker_;
+  rtc::ThreadChecker worker_thread_checker_;
+
+  // The audio device manager.
+  rtc::scoped_refptr<webrtc::AudioDeviceModule> adm_;
+  rtc::scoped_refptr<webrtc::AudioEncoderFactory> encoder_factory_;
+  rtc::scoped_refptr<webrtc::AudioDecoderFactory> decoder_factory_;
+  rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer_;
+  // Reference to the APM, owned by VoE.
+  rtc::scoped_refptr<webrtc::AudioProcessing> apm_;
+  // Reference to the TransmitMixer, owned by VoE.
+  webrtc::voe::TransmitMixer* transmit_mixer_ = nullptr;
+  // The primary instance of WebRtc VoiceEngine.
+  std::unique_ptr<VoEWrapper> voe_wrapper_;
+  rtc::scoped_refptr<webrtc::AudioState> audio_state_;
+  std::vector<AudioCodec> send_codecs_;
+  std::vector<AudioCodec> recv_codecs_;
+  // All channels created via CreateChannel(); see RegisterChannel().
+  std::vector<WebRtcVoiceMediaChannel*> channels_;
+  webrtc::VoEBase::ChannelConfig channel_config_;
+  bool is_dumping_aec_ = false;
+  bool initialized_ = false;
+
+  webrtc::AgcConfig default_agc_config_;
+  // Cache received extended_filter_aec, delay_agnostic_aec, experimental_ns
+  // level controller, and intelligibility_enhancer values, and apply them
+  // in case they are missing in the audio options. We need to do this because
+  // SetExtraOptions() will revert to defaults for options which are not
+  // provided.
+  rtc::Optional<bool> extended_filter_aec_;
+  rtc::Optional<bool> delay_agnostic_aec_;
+  rtc::Optional<bool> experimental_ns_;
+  rtc::Optional<bool> intelligibility_enhancer_;
+  rtc::Optional<bool> level_control_;
+
+  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceEngine);
+};
+
+// WebRtcVoiceMediaChannel is an implementation of VoiceMediaChannel that uses
+// WebRtc Voice Engine.
+class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
+ public webrtc::Transport {
+ public:
+ WebRtcVoiceMediaChannel(WebRtcVoiceEngine* engine,
+ const MediaConfig& config,
+ const AudioOptions& options,
+ webrtc::Call* call);
+ ~WebRtcVoiceMediaChannel() override;
+
+ const AudioOptions& options() const { return options_; }
+
+ rtc::DiffServCodePoint PreferredDscp() const override;
+
+ bool SetSendParameters(const AudioSendParameters& params) override;
+ bool SetRecvParameters(const AudioRecvParameters& params) override;
+ webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override;
+ bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
+ webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override;
+ bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
+
+ void SetPlayout(bool playout) override;
+ void SetSend(bool send) override;
+ bool SetAudioSend(uint32_t ssrc,
+ bool enable,
+ const AudioOptions* options,
+ AudioSource* source) override;
+ bool AddSendStream(const StreamParams& sp) override;
+ bool RemoveSendStream(uint32_t ssrc) override;
+ bool AddRecvStream(const StreamParams& sp) override;
+ bool RemoveRecvStream(uint32_t ssrc) override;
+ bool GetActiveStreams(StreamList* actives) override;
+ int GetOutputLevel() override;
+ // SSRC=0 will apply the new volume to current and future unsignaled streams.
+ bool SetOutputVolume(uint32_t ssrc, double volume) override;
+
+ bool CanInsertDtmf() override;
+ bool InsertDtmf(uint32_t ssrc, int event, int duration) override;
+
+ void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketTime& packet_time) override;
+ void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketTime& packet_time) override;
+ void OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) override;
+ void OnReadyToSend(bool ready) override;
+ bool GetStats(VoiceMediaInfo* info) override;
+
+ // SSRC=0 will set the audio sink on the latest unsignaled stream, future or
+ // current. Only one stream at a time will use the sink.
+ void SetRawAudioSink(
+ uint32_t ssrc,
+ std::unique_ptr<webrtc::AudioSinkInterface> sink) override;
+
+ std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const override;
+
+ // implements Transport interface
+ bool SendRtp(const uint8_t* data,
+ size_t len,
+ const webrtc::PacketOptions& options) override {
+ rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
+ rtc::PacketOptions rtc_options;
+ rtc_options.packet_id = options.packet_id;
+ return VoiceMediaChannel::SendPacket(&packet, rtc_options);
+ }
+
+ bool SendRtcp(const uint8_t* data, size_t len) override {
+ rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
+ return VoiceMediaChannel::SendRtcp(&packet, rtc::PacketOptions());
+ }
+
+ int GetReceiveChannelId(uint32_t ssrc) const;
+ int GetSendChannelId(uint32_t ssrc) const;
+
+ private:
+ bool SetOptions(const AudioOptions& options);
+ bool SetRecvCodecs(const std::vector<AudioCodec>& codecs);
+ bool SetSendCodecs(const std::vector<AudioCodec>& codecs);
+ bool SetLocalSource(uint32_t ssrc, AudioSource* source);
+ bool MuteStream(uint32_t ssrc, bool mute);
+
+ WebRtcVoiceEngine* engine() { return engine_; }
+ void ChangePlayout(bool playout);
+ int CreateVoEChannel();
+ bool DeleteVoEChannel(int channel);
+ bool SetMaxSendBitrate(int bps);
+ bool ValidateRtpParameters(const webrtc::RtpParameters& parameters);
+ void SetupRecording();
+ // Check if 'ssrc' is an unsignaled stream, and if so mark it as not being
+ // unsignaled anymore (i.e. it is now removed, or signaled), and return true.
+ bool MaybeDeregisterUnsignaledRecvStream(uint32_t ssrc);
+
+ rtc::ThreadChecker worker_thread_checker_;
+
+ WebRtcVoiceEngine* const engine_ = nullptr;
+ std::vector<AudioCodec> send_codecs_;
+
+ // TODO(kwiberg): decoder_map_ and recv_codecs_ store the exact same
+ // information, in slightly different formats. Eliminate recv_codecs_.
+ std::map<int, webrtc::SdpAudioFormat> decoder_map_;
+ std::vector<AudioCodec> recv_codecs_;
+
+ int max_send_bitrate_bps_ = 0;
+ AudioOptions options_;
+ rtc::Optional<int> dtmf_payload_type_;
+ int dtmf_payload_freq_ = -1;
+ bool recv_transport_cc_enabled_ = false;
+ bool recv_nack_enabled_ = false;
+ bool desired_playout_ = false;
+ bool playout_ = false;
+ bool send_ = false;
+ webrtc::Call* const call_ = nullptr;
+
+ // Queue of unsignaled SSRCs; oldest at the beginning.
+ std::vector<uint32_t> unsignaled_recv_ssrcs_;
+
+ // Volume for unsignaled streams, which may be set before the stream exists.
+ double default_recv_volume_ = 1.0;
+ // Sink for latest unsignaled stream - may be set before the stream exists.
+ std::unique_ptr<webrtc::AudioSinkInterface> default_sink_;
+ // Default SSRC to use for RTCP receiver reports in case of no signaled
+ // send streams. See: https://code.google.com/p/webrtc/issues/detail?id=4740
+ // and https://code.google.com/p/chromium/issues/detail?id=547661
+ uint32_t receiver_reports_ssrc_ = 0xFA17FA17u;
+
+ class WebRtcAudioSendStream;
+ std::map<uint32_t, WebRtcAudioSendStream*> send_streams_;
+ std::vector<webrtc::RtpExtension> send_rtp_extensions_;
+
+ class WebRtcAudioReceiveStream;
+ std::map<uint32_t, WebRtcAudioReceiveStream*> recv_streams_;
+ std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
+
+ rtc::Optional<webrtc::AudioSendStream::Config::SendCodecSpec>
+ send_codec_spec_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceMediaChannel);
+};
+} // namespace cricket
+
+#endif // MEDIA_ENGINE_WEBRTCVOICEENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine_unittest.cc b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine_unittest.cc
new file mode 100644
index 0000000000..d604d5b919
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/engine/webrtcvoiceengine_unittest.cc
@@ -0,0 +1,3575 @@
+/*
+ * Copyright (c) 2008 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <utility>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "call/call.h"
+#include "logging/rtc_event_log/rtc_event_log.h"
+#include "media/base/fakemediaengine.h"
+#include "media/base/fakenetworkinterface.h"
+#include "media/base/fakertp.h"
+#include "media/base/mediaconstants.h"
+#include "media/engine/fakewebrtccall.h"
+#include "media/engine/fakewebrtcvoiceengine.h"
+#include "media/engine/webrtcvoiceengine.h"
+#include "modules/audio_device/include/mock_audio_device.h"
+#include "modules/audio_processing/include/mock_audio_processing.h"
+#include "pc/channel.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/scoped_ref_ptr.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/mock_audio_decoder_factory.h"
+#include "test/mock_audio_encoder_factory.h"
+#include "voice_engine/transmit_mixer.h"
+
+using testing::_;
+using testing::ContainerEq;
+using testing::Return;
+using testing::ReturnPointee;
+using testing::SaveArg;
+using testing::StrictMock;
+
+namespace {
+
+constexpr uint32_t kMaxUnsignaledRecvStreams = 4;
+
+const cricket::AudioCodec kPcmuCodec(0, "PCMU", 8000, 64000, 1);
+const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1);
+const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 32000, 2);
+const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1);
+const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1);
+const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1);
+const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1);
+const cricket::AudioCodec
+ kTelephoneEventCodec1(106, "telephone-event", 8000, 0, 1);
+const cricket::AudioCodec
+ kTelephoneEventCodec2(107, "telephone-event", 32000, 0, 1);
+
+const uint32_t kSsrc0 = 0;
+const uint32_t kSsrc1 = 1;
+const uint32_t kSsrcX = 0x99;
+const uint32_t kSsrcY = 0x17;
+const uint32_t kSsrcZ = 0x42;
+const uint32_t kSsrcW = 0x02;
+const uint32_t kSsrcs4[] = { 11, 200, 30, 44 };
+
+constexpr int kRtpHistoryMs = 5000;
+
+constexpr webrtc::GainControl::Mode kDefaultAgcMode =
+#if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID)
+ webrtc::GainControl::kFixedDigital;
+#else
+ webrtc::GainControl::kAdaptiveAnalog;
+#endif
+
+constexpr webrtc::NoiseSuppression::Level kDefaultNsLevel =
+ webrtc::NoiseSuppression::kHigh;
+
+class FakeVoEWrapper : public cricket::VoEWrapper {
+ public:
+ explicit FakeVoEWrapper(cricket::FakeWebRtcVoiceEngine* engine)
+ : cricket::VoEWrapper(engine) {
+ }
+};
+
+class MockTransmitMixer : public webrtc::voe::TransmitMixer {
+ public:
+ MockTransmitMixer() = default;
+ virtual ~MockTransmitMixer() = default;
+
+ MOCK_METHOD1(EnableStereoChannelSwapping, void(bool enable));
+};
+
+void AdmSetupExpectations(webrtc::test::MockAudioDeviceModule* adm) {
+ RTC_DCHECK(adm);
+
+ // Setup.
+ EXPECT_CALL(*adm, AddRef()).Times(1);
+ EXPECT_CALL(*adm, Init()).WillOnce(Return(0));
+#if defined(WEBRTC_WIN)
+ EXPECT_CALL(*adm, SetPlayoutDevice(
+ testing::Matcher<webrtc::AudioDeviceModule::WindowsDeviceType>(
+ webrtc::AudioDeviceModule::kDefaultCommunicationDevice)))
+ .WillOnce(Return(0));
+#else
+ EXPECT_CALL(*adm, SetPlayoutDevice(0)).WillOnce(Return(0));
+#endif // #if defined(WEBRTC_WIN)
+ EXPECT_CALL(*adm, InitSpeaker()).WillOnce(Return(0));
+ EXPECT_CALL(*adm, StereoPlayoutIsAvailable(testing::_)).WillOnce(Return(0));
+ EXPECT_CALL(*adm, SetStereoPlayout(false)).WillOnce(Return(0));
+#if defined(WEBRTC_WIN)
+ EXPECT_CALL(*adm, SetRecordingDevice(
+ testing::Matcher<webrtc::AudioDeviceModule::WindowsDeviceType>(
+ webrtc::AudioDeviceModule::kDefaultCommunicationDevice)))
+ .WillOnce(Return(0));
+#else
+ EXPECT_CALL(*adm, SetRecordingDevice(0)).WillOnce(Return(0));
+#endif // #if defined(WEBRTC_WIN)
+ EXPECT_CALL(*adm, InitMicrophone()).WillOnce(Return(0));
+ EXPECT_CALL(*adm, StereoRecordingIsAvailable(testing::_)).WillOnce(Return(0));
+ EXPECT_CALL(*adm, SetStereoRecording(false)).WillOnce(Return(0));
+ EXPECT_CALL(*adm, BuiltInAECIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(*adm, BuiltInAGCIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(*adm, BuiltInNSIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(*adm, SetAGC(true)).WillOnce(Return(0));
+
+ // Teardown.
+ EXPECT_CALL(*adm, StopPlayout()).WillOnce(Return(0));
+ EXPECT_CALL(*adm, StopRecording()).WillOnce(Return(0));
+ EXPECT_CALL(*adm, RegisterAudioCallback(nullptr)).WillOnce(Return(0));
+ EXPECT_CALL(*adm, Terminate()).WillOnce(Return(0));
+ EXPECT_CALL(*adm, Release())
+ .WillOnce(Return(rtc::RefCountReleaseStatus::kDroppedLastRef));
+}
+} // namespace
+
+// Tests that our stub library "works".
+TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) {
+ StrictMock<webrtc::test::MockAudioDeviceModule> adm;
+ AdmSetupExpectations(&adm);
+ rtc::scoped_refptr<StrictMock<webrtc::test::MockAudioProcessing>> apm =
+ new rtc::RefCountedObject<
+ StrictMock<webrtc::test::MockAudioProcessing>>();
+ webrtc::AudioProcessing::Config apm_config;
+ EXPECT_CALL(*apm, GetConfig()).WillRepeatedly(ReturnPointee(&apm_config));
+ EXPECT_CALL(*apm, ApplyConfig(_)).WillRepeatedly(SaveArg<0>(&apm_config));
+ EXPECT_CALL(*apm, SetExtraOptions(testing::_));
+ EXPECT_CALL(*apm, DetachAecDump());
+ StrictMock<MockTransmitMixer> transmit_mixer;
+ EXPECT_CALL(transmit_mixer, EnableStereoChannelSwapping(false));
+ cricket::FakeWebRtcVoiceEngine voe(&transmit_mixer);
+ EXPECT_FALSE(voe.IsInited());
+ {
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
+ new FakeVoEWrapper(&voe));
+ engine.Init();
+ EXPECT_TRUE(voe.IsInited());
+ }
+ EXPECT_FALSE(voe.IsInited());
+}
+
+class FakeAudioSink : public webrtc::AudioSinkInterface {
+ public:
+ void OnData(const Data& audio) override {}
+};
+
+class FakeAudioSource : public cricket::AudioSource {
+ void SetSink(Sink* sink) override {}
+};
+
+class WebRtcVoiceEngineTestFake : public testing::Test {
+ public:
+ WebRtcVoiceEngineTestFake() : WebRtcVoiceEngineTestFake("") {}
+
+ explicit WebRtcVoiceEngineTestFake(const char* field_trials)
+ : apm_(new rtc::RefCountedObject<
+ StrictMock<webrtc::test::MockAudioProcessing>>()),
+ apm_gc_(*apm_->gain_control()),
+ apm_ec_(*apm_->echo_cancellation()),
+ apm_ns_(*apm_->noise_suppression()),
+ apm_vd_(*apm_->voice_detection()),
+ call_(webrtc::Call::Config(&event_log_)),
+ voe_(&transmit_mixer_),
+ override_field_trials_(field_trials) {
+ // AudioDeviceModule.
+ AdmSetupExpectations(&adm_);
+ // AudioProcessing.
+ EXPECT_CALL(*apm_, GetConfig()).WillRepeatedly(ReturnPointee(&apm_config_));
+ EXPECT_CALL(*apm_, ApplyConfig(_)).WillRepeatedly(SaveArg<0>(&apm_config_));
+ EXPECT_CALL(*apm_, SetExtraOptions(testing::_));
+ EXPECT_CALL(*apm_, DetachAecDump());
+ // Default Options.
+ EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+ EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+ EXPECT_CALL(apm_ec_, enable_drift_compensation(false)).WillOnce(Return(0));
+ EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+ EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+ EXPECT_CALL(apm_gc_, set_analog_level_limits(0, 255)).WillOnce(Return(0));
+ EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+ EXPECT_CALL(apm_ns_, Enable(true)).WillOnce(Return(0));
+ EXPECT_CALL(apm_vd_, Enable(true)).WillOnce(Return(0));
+ EXPECT_CALL(transmit_mixer_, EnableStereoChannelSwapping(false));
+ // Init does not overwrite default AGC config.
+ EXPECT_CALL(apm_gc_, target_level_dbfs()).WillOnce(Return(1));
+ EXPECT_CALL(apm_gc_, compression_gain_db()).WillRepeatedly(Return(5));
+ EXPECT_CALL(apm_gc_, is_limiter_enabled()).WillRepeatedly(Return(true));
+ EXPECT_CALL(apm_gc_, set_target_level_dbfs(1)).WillOnce(Return(0));
+ EXPECT_CALL(apm_gc_, set_compression_gain_db(5)).WillRepeatedly(Return(0));
+ EXPECT_CALL(apm_gc_, enable_limiter(true)).WillRepeatedly(Return(0));
+ // TODO(kwiberg): We should use mock factories here, but a bunch of
+ // the tests here probe the specific set of codecs provided by the builtin
+ // factories. Those tests should probably be moved elsewhere.
+ auto encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
+ auto decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
+ engine_.reset(new cricket::WebRtcVoiceEngine(&adm_, encoder_factory,
+ decoder_factory, nullptr, apm_,
+ new FakeVoEWrapper(&voe_)));
+ engine_->Init();
+ send_parameters_.codecs.push_back(kPcmuCodec);
+ recv_parameters_.codecs.push_back(kPcmuCodec);
+
+ // Default Options.
+ EXPECT_TRUE(IsHighPassFilterEnabled());
+ }
+
+ bool SetupChannel() {
+ EXPECT_CALL(*apm_, SetExtraOptions(testing::_));
+ channel_ = engine_->CreateChannel(&call_, cricket::MediaConfig(),
+ cricket::AudioOptions());
+ return (channel_ != nullptr);
+ }
+
+ bool SetupRecvStream() {
+ if (!SetupChannel()) {
+ return false;
+ }
+ return AddRecvStream(kSsrcX);
+ }
+
+ bool SetupSendStream() {
+ if (!SetupChannel()) {
+ return false;
+ }
+ if (!channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))) {
+ return false;
+ }
+ EXPECT_CALL(*apm_, set_output_will_be_muted(false));
+ return channel_->SetAudioSend(kSsrcX, true, nullptr, &fake_source_);
+ }
+
+ bool AddRecvStream(uint32_t ssrc) {
+ EXPECT_TRUE(channel_);
+ return channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc));
+ }
+
+ void SetupForMultiSendStream() {
+ EXPECT_TRUE(SetupSendStream());
+ // Remove stream added in Setup.
+ EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrcX));
+ // Verify the channel does not exist.
+ EXPECT_FALSE(call_.GetAudioSendStream(kSsrcX));
+ }
+
+ void DeliverPacket(const void* data, int len) {
+ rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len);
+ channel_->OnPacketReceived(&packet, rtc::PacketTime());
+ }
+
+ void TearDown() override {
+ delete channel_;
+ }
+
+ const cricket::FakeAudioSendStream& GetSendStream(uint32_t ssrc) {
+ const auto* send_stream = call_.GetAudioSendStream(ssrc);
+ EXPECT_TRUE(send_stream);
+ return *send_stream;
+ }
+
+ const cricket::FakeAudioReceiveStream& GetRecvStream(uint32_t ssrc) {
+ const auto* recv_stream = call_.GetAudioReceiveStream(ssrc);
+ EXPECT_TRUE(recv_stream);
+ return *recv_stream;
+ }
+
+ const webrtc::AudioSendStream::Config& GetSendStreamConfig(uint32_t ssrc) {
+ return GetSendStream(ssrc).GetConfig();
+ }
+
+ const webrtc::AudioReceiveStream::Config& GetRecvStreamConfig(uint32_t ssrc) {
+ return GetRecvStream(ssrc).GetConfig();
+ }
+
+ void SetSend(bool enable) {
+ ASSERT_TRUE(channel_);
+ if (enable) {
+ EXPECT_CALL(adm_, RecordingIsInitialized()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, Recording()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, InitRecording()).WillOnce(Return(0));
+ EXPECT_CALL(*apm_, SetExtraOptions(testing::_));
+ }
+ channel_->SetSend(enable);
+ }
+
+ void SetSendParameters(const cricket::AudioSendParameters& params) {
+ EXPECT_CALL(*apm_, SetExtraOptions(testing::_));
+ ASSERT_TRUE(channel_);
+ EXPECT_TRUE(channel_->SetSendParameters(params));
+ }
+
+ void SetAudioSend(uint32_t ssrc, bool enable, cricket::AudioSource* source,
+ const cricket::AudioOptions* options = nullptr) {
+ EXPECT_CALL(*apm_, set_output_will_be_muted(!enable));
+ ASSERT_TRUE(channel_);
+ if (enable && options) {
+ EXPECT_CALL(*apm_, SetExtraOptions(testing::_));
+ }
+ EXPECT_TRUE(channel_->SetAudioSend(ssrc, enable, options, source));
+ }
+
+ void TestInsertDtmf(uint32_t ssrc, bool caller,
+ const cricket::AudioCodec& codec) {
+ EXPECT_TRUE(SetupChannel());
+ if (caller) {
+ // If this is a caller, local description will be applied and add the
+ // send stream.
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcX)));
+ }
+
+ // Test we can only InsertDtmf when the other side supports telephone-event.
+ SetSendParameters(send_parameters_);
+ SetSend(true);
+ EXPECT_FALSE(channel_->CanInsertDtmf());
+ EXPECT_FALSE(channel_->InsertDtmf(ssrc, 1, 111));
+ send_parameters_.codecs.push_back(codec);
+ SetSendParameters(send_parameters_);
+ EXPECT_TRUE(channel_->CanInsertDtmf());
+
+ if (!caller) {
+ // If this is callee, there's no active send channel yet.
+ EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcX)));
+ }
+
+ // Check we fail if the ssrc is invalid.
+ EXPECT_FALSE(channel_->InsertDtmf(-1, 1, 111));
+
+ // Test send.
+ cricket::FakeAudioSendStream::TelephoneEvent telephone_event =
+ GetSendStream(kSsrcX).GetLatestTelephoneEvent();
+ EXPECT_EQ(-1, telephone_event.payload_type);
+ EXPECT_TRUE(channel_->InsertDtmf(ssrc, 2, 123));
+ telephone_event = GetSendStream(kSsrcX).GetLatestTelephoneEvent();
+ EXPECT_EQ(codec.id, telephone_event.payload_type);
+ EXPECT_EQ(codec.clockrate, telephone_event.payload_frequency);
+ EXPECT_EQ(2, telephone_event.event_code);
+ EXPECT_EQ(123, telephone_event.duration_ms);
+ }
+
+ // Test that send bandwidth is set correctly.
+ // |codec| is the codec under test.
+ // |max_bitrate| is a parameter to set to SetMaxSendBandwidth().
+ // |expected_result| is the expected result from SetMaxSendBandwidth().
+ // |expected_bitrate| is the expected audio bitrate afterward.
+ void TestMaxSendBandwidth(const cricket::AudioCodec& codec,
+ int max_bitrate,
+ bool expected_result,
+ int expected_bitrate) {
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ parameters.max_bandwidth_bps = max_bitrate;
+ if (expected_result) {
+ SetSendParameters(parameters);
+ } else {
+ EXPECT_FALSE(channel_->SetSendParameters(parameters));
+ }
+ EXPECT_EQ(expected_bitrate, GetCodecBitrate(kSsrcX));
+ }
+
+ // Sets the per-stream maximum bitrate limit for the specified SSRC.
+ bool SetMaxBitrateForStream(int32_t ssrc, int bitrate) {
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrc);
+ EXPECT_EQ(1UL, parameters.encodings.size());
+
+ parameters.encodings[0].max_bitrate_bps = bitrate;
+ return channel_->SetRtpSendParameters(ssrc, parameters);
+ }
+
+ void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) {
+ cricket::AudioSendParameters send_parameters;
+ send_parameters.codecs.push_back(codec);
+ send_parameters.max_bandwidth_bps = bitrate;
+ SetSendParameters(send_parameters);
+ }
+
+ void CheckSendCodecBitrate(int32_t ssrc,
+ const char expected_name[],
+ int expected_bitrate) {
+ const auto& spec = GetSendStreamConfig(ssrc).send_codec_spec;
+ EXPECT_EQ(expected_name, spec->format.name);
+ EXPECT_EQ(expected_bitrate, spec->target_bitrate_bps);
+ }
+
+ rtc::Optional<int> GetCodecBitrate(int32_t ssrc) {
+ return GetSendStreamConfig(ssrc).send_codec_spec->target_bitrate_bps;
+ }
+
+ const rtc::Optional<std::string>& GetAudioNetworkAdaptorConfig(int32_t ssrc) {
+ return GetSendStreamConfig(ssrc).audio_network_adaptor_config;
+ }
+
+ void SetAndExpectMaxBitrate(const cricket::AudioCodec& codec,
+ int global_max,
+ int stream_max,
+ bool expected_result,
+ int expected_codec_bitrate) {
+ // Clear the bitrate limit from the previous test case.
+ EXPECT_TRUE(SetMaxBitrateForStream(kSsrcX, -1));
+
+ // Attempt to set the requested bitrate limits.
+ SetGlobalMaxBitrate(codec, global_max);
+ EXPECT_EQ(expected_result, SetMaxBitrateForStream(kSsrcX, stream_max));
+
+ // Verify that reading back the parameters gives results
+ // consistent with the Set() result.
+ webrtc::RtpParameters resulting_parameters =
+ channel_->GetRtpSendParameters(kSsrcX);
+ EXPECT_EQ(1UL, resulting_parameters.encodings.size());
+ EXPECT_EQ(expected_result ? stream_max : -1,
+ resulting_parameters.encodings[0].max_bitrate_bps);
+
+ // Verify that the codec settings have the expected bitrate.
+ EXPECT_EQ(expected_codec_bitrate, GetCodecBitrate(kSsrcX));
+ }
+
+ void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate_kbps,
+ int expected_min_bitrate_bps,
+ const char* start_bitrate_kbps,
+ int expected_start_bitrate_bps,
+ const char* max_bitrate_kbps,
+ int expected_max_bitrate_bps) {
+ EXPECT_TRUE(SetupSendStream());
+ auto& codecs = send_parameters_.codecs;
+ codecs.clear();
+ codecs.push_back(kOpusCodec);
+ codecs[0].params[cricket::kCodecParamMinBitrate] = min_bitrate_kbps;
+ codecs[0].params[cricket::kCodecParamStartBitrate] = start_bitrate_kbps;
+ codecs[0].params[cricket::kCodecParamMaxBitrate] = max_bitrate_kbps;
+ SetSendParameters(send_parameters_);
+
+ EXPECT_EQ(expected_min_bitrate_bps,
+ call_.GetConfig().bitrate_config.min_bitrate_bps);
+ EXPECT_EQ(expected_start_bitrate_bps,
+ call_.GetConfig().bitrate_config.start_bitrate_bps);
+ EXPECT_EQ(expected_max_bitrate_bps,
+ call_.GetConfig().bitrate_config.max_bitrate_bps);
+ }
+
+ void TestSetSendRtpHeaderExtensions(const std::string& ext) {
+ EXPECT_TRUE(SetupSendStream());
+
+ // Ensure extensions are off by default.
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure unknown extensions won't cause an error.
+ send_parameters_.extensions.push_back(
+ webrtc::RtpExtension("urn:ietf:params:unknownextention", 1));
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure extensions stay off with an empty list of headers.
+ send_parameters_.extensions.clear();
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure extension is set properly.
+ const int id = 1;
+ send_parameters_.extensions.push_back(webrtc::RtpExtension(ext, id));
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(1u, GetSendStreamConfig(kSsrcX).rtp.extensions.size());
+ EXPECT_EQ(ext, GetSendStreamConfig(kSsrcX).rtp.extensions[0].uri);
+ EXPECT_EQ(id, GetSendStreamConfig(kSsrcX).rtp.extensions[0].id);
+
+ // Ensure extension is set properly on new stream.
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcY)));
+ EXPECT_NE(call_.GetAudioSendStream(kSsrcX),
+ call_.GetAudioSendStream(kSsrcY));
+ EXPECT_EQ(1u, GetSendStreamConfig(kSsrcY).rtp.extensions.size());
+ EXPECT_EQ(ext, GetSendStreamConfig(kSsrcY).rtp.extensions[0].uri);
+ EXPECT_EQ(id, GetSendStreamConfig(kSsrcY).rtp.extensions[0].id);
+
+ // Ensure all extensions go back off with an empty list.
+ send_parameters_.codecs.push_back(kPcmuCodec);
+ send_parameters_.extensions.clear();
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrcX).rtp.extensions.size());
+ EXPECT_EQ(0u, GetSendStreamConfig(kSsrcY).rtp.extensions.size());
+ }
+
+ void TestSetRecvRtpHeaderExtensions(const std::string& ext) {
+ EXPECT_TRUE(SetupRecvStream());
+
+ // Ensure extensions are off by default.
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure unknown extensions won't cause an error.
+ recv_parameters_.extensions.push_back(
+ webrtc::RtpExtension("urn:ietf:params:unknownextention", 1));
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure extensions stay off with an empty list of headers.
+ recv_parameters_.extensions.clear();
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrcX).rtp.extensions.size());
+
+ // Ensure extension is set properly.
+ const int id = 2;
+ recv_parameters_.extensions.push_back(webrtc::RtpExtension(ext, id));
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(1u, GetRecvStreamConfig(kSsrcX).rtp.extensions.size());
+ EXPECT_EQ(ext, GetRecvStreamConfig(kSsrcX).rtp.extensions[0].uri);
+ EXPECT_EQ(id, GetRecvStreamConfig(kSsrcX).rtp.extensions[0].id);
+
+ // Ensure extension is set properly on new stream.
+ EXPECT_TRUE(AddRecvStream(kSsrcY));
+ EXPECT_NE(call_.GetAudioReceiveStream(kSsrcX),
+ call_.GetAudioReceiveStream(kSsrcY));
+ EXPECT_EQ(1u, GetRecvStreamConfig(kSsrcY).rtp.extensions.size());
+ EXPECT_EQ(ext, GetRecvStreamConfig(kSsrcY).rtp.extensions[0].uri);
+ EXPECT_EQ(id, GetRecvStreamConfig(kSsrcY).rtp.extensions[0].id);
+
+ // Ensure all extensions go back off with an empty list.
+ recv_parameters_.extensions.clear();
+ EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrcX).rtp.extensions.size());
+ EXPECT_EQ(0u, GetRecvStreamConfig(kSsrcY).rtp.extensions.size());
+ }
+
+ webrtc::AudioSendStream::Stats GetAudioSendStreamStats() const {
+ webrtc::AudioSendStream::Stats stats;
+ stats.local_ssrc = 12;
+ stats.bytes_sent = 345;
+ stats.packets_sent = 678;
+ stats.packets_lost = 9012;
+ stats.fraction_lost = 34.56f;
+ stats.codec_name = "codec_name_send";
+ stats.codec_payload_type = 42;
+ stats.ext_seqnum = 789;
+ stats.jitter_ms = 12;
+ stats.rtt_ms = 345;
+ stats.audio_level = 678;
+ stats.apm_statistics.delay_median_ms = 234;
+ stats.apm_statistics.delay_standard_deviation_ms = 567;
+ stats.apm_statistics.echo_return_loss = 890;
+ stats.apm_statistics.echo_return_loss_enhancement = 1234;
+ stats.apm_statistics.residual_echo_likelihood = 0.432f;
+ stats.apm_statistics.residual_echo_likelihood_recent_max = 0.6f;
+ stats.ana_statistics.bitrate_action_counter = 321;
+ stats.ana_statistics.channel_action_counter = 432;
+ stats.ana_statistics.dtx_action_counter = 543;
+ stats.ana_statistics.fec_action_counter = 654;
+ stats.ana_statistics.frame_length_increase_counter = 765;
+ stats.ana_statistics.frame_length_decrease_counter = 876;
+ stats.ana_statistics.uplink_packet_loss_fraction = 987.0;
+ stats.typing_noise_detected = true;
+ return stats;
+ }
+ void SetAudioSendStreamStats() {
+ for (auto* s : call_.GetAudioSendStreams()) {
+ s->SetStats(GetAudioSendStreamStats());
+ }
+ }
+ void VerifyVoiceSenderInfo(const cricket::VoiceSenderInfo& info,
+ bool is_sending) {
+ const auto stats = GetAudioSendStreamStats();
+ EXPECT_EQ(info.ssrc(), stats.local_ssrc);
+ EXPECT_EQ(info.bytes_sent, stats.bytes_sent);
+ EXPECT_EQ(info.packets_sent, stats.packets_sent);
+ EXPECT_EQ(info.packets_lost, stats.packets_lost);
+ EXPECT_EQ(info.fraction_lost, stats.fraction_lost);
+ EXPECT_EQ(info.codec_name, stats.codec_name);
+ EXPECT_EQ(info.codec_payload_type, stats.codec_payload_type);
+ EXPECT_EQ(info.ext_seqnum, stats.ext_seqnum);
+ EXPECT_EQ(info.jitter_ms, stats.jitter_ms);
+ EXPECT_EQ(info.rtt_ms, stats.rtt_ms);
+ EXPECT_EQ(info.audio_level, stats.audio_level);
+ EXPECT_EQ(info.apm_statistics.delay_median_ms,
+ stats.apm_statistics.delay_median_ms);
+ EXPECT_EQ(info.apm_statistics.delay_standard_deviation_ms,
+ stats.apm_statistics.delay_standard_deviation_ms);
+ EXPECT_EQ(info.apm_statistics.echo_return_loss,
+ stats.apm_statistics.echo_return_loss);
+ EXPECT_EQ(info.apm_statistics.echo_return_loss_enhancement,
+ stats.apm_statistics.echo_return_loss_enhancement);
+ EXPECT_EQ(info.apm_statistics.residual_echo_likelihood,
+ stats.apm_statistics.residual_echo_likelihood);
+ EXPECT_EQ(info.apm_statistics.residual_echo_likelihood_recent_max,
+ stats.apm_statistics.residual_echo_likelihood_recent_max);
+ EXPECT_EQ(info.ana_statistics.bitrate_action_counter,
+ stats.ana_statistics.bitrate_action_counter);
+ EXPECT_EQ(info.ana_statistics.channel_action_counter,
+ stats.ana_statistics.channel_action_counter);
+ EXPECT_EQ(info.ana_statistics.dtx_action_counter,
+ stats.ana_statistics.dtx_action_counter);
+ EXPECT_EQ(info.ana_statistics.fec_action_counter,
+ stats.ana_statistics.fec_action_counter);
+ EXPECT_EQ(info.ana_statistics.frame_length_increase_counter,
+ stats.ana_statistics.frame_length_increase_counter);
+ EXPECT_EQ(info.ana_statistics.frame_length_decrease_counter,
+ stats.ana_statistics.frame_length_decrease_counter);
+ EXPECT_EQ(info.ana_statistics.uplink_packet_loss_fraction,
+ stats.ana_statistics.uplink_packet_loss_fraction);
+ EXPECT_EQ(info.typing_noise_detected,
+ stats.typing_noise_detected && is_sending);
+ }
+
+ webrtc::AudioReceiveStream::Stats GetAudioReceiveStreamStats() const {
+ webrtc::AudioReceiveStream::Stats stats;
+ stats.remote_ssrc = 123;
+ stats.bytes_rcvd = 456;
+ stats.packets_rcvd = 768;
+ stats.packets_lost = 101;
+ stats.fraction_lost = 23.45f;
+ stats.codec_name = "codec_name_recv";
+ stats.codec_payload_type = 42;
+ stats.ext_seqnum = 678;
+ stats.jitter_ms = 901;
+ stats.jitter_buffer_ms = 234;
+ stats.jitter_buffer_preferred_ms = 567;
+ stats.delay_estimate_ms = 890;
+ stats.audio_level = 1234;
+ stats.total_samples_received = 5678901;
+ stats.concealed_samples = 234;
+ stats.concealment_events = 12;
+ stats.jitter_buffer_delay_seconds = 34;
+ stats.expand_rate = 5.67f;
+ stats.speech_expand_rate = 8.90f;
+ stats.secondary_decoded_rate = 1.23f;
+ stats.secondary_discarded_rate = 0.12f;
+ stats.accelerate_rate = 4.56f;
+ stats.preemptive_expand_rate = 7.89f;
+ stats.decoding_calls_to_silence_generator = 12;
+ stats.decoding_calls_to_neteq = 345;
+ stats.decoding_normal = 67890;
+ stats.decoding_plc = 1234;
+ stats.decoding_cng = 5678;
+ stats.decoding_plc_cng = 9012;
+ stats.decoding_muted_output = 3456;
+ stats.capture_start_ntp_time_ms = 7890;
+ return stats;
+ }
+ void SetAudioReceiveStreamStats() {
+ for (auto* s : call_.GetAudioReceiveStreams()) {
+ s->SetStats(GetAudioReceiveStreamStats());
+ }
+ }
+ void VerifyVoiceReceiverInfo(const cricket::VoiceReceiverInfo& info) {
+ const auto stats = GetAudioReceiveStreamStats();
+ EXPECT_EQ(info.ssrc(), stats.remote_ssrc);
+ EXPECT_EQ(info.bytes_rcvd, stats.bytes_rcvd);
+ EXPECT_EQ(info.packets_rcvd, stats.packets_rcvd);
+ EXPECT_EQ(info.packets_lost, stats.packets_lost);
+ EXPECT_EQ(info.fraction_lost, stats.fraction_lost);
+ EXPECT_EQ(info.codec_name, stats.codec_name);
+ EXPECT_EQ(info.codec_payload_type, stats.codec_payload_type);
+ EXPECT_EQ(info.ext_seqnum, stats.ext_seqnum);
+ EXPECT_EQ(info.jitter_ms, stats.jitter_ms);
+ EXPECT_EQ(info.jitter_buffer_ms, stats.jitter_buffer_ms);
+ EXPECT_EQ(info.jitter_buffer_preferred_ms,
+ stats.jitter_buffer_preferred_ms);
+ EXPECT_EQ(info.delay_estimate_ms, stats.delay_estimate_ms);
+ EXPECT_EQ(info.audio_level, stats.audio_level);
+ EXPECT_EQ(info.total_samples_received, stats.total_samples_received);
+ EXPECT_EQ(info.concealed_samples, stats.concealed_samples);
+ EXPECT_EQ(info.concealment_events, stats.concealment_events);
+ EXPECT_EQ(info.jitter_buffer_delay_seconds,
+ stats.jitter_buffer_delay_seconds);
+ EXPECT_EQ(info.expand_rate, stats.expand_rate);
+ EXPECT_EQ(info.speech_expand_rate, stats.speech_expand_rate);
+ EXPECT_EQ(info.secondary_decoded_rate, stats.secondary_decoded_rate);
+ EXPECT_EQ(info.secondary_discarded_rate, stats.secondary_discarded_rate);
+ EXPECT_EQ(info.accelerate_rate, stats.accelerate_rate);
+ EXPECT_EQ(info.preemptive_expand_rate, stats.preemptive_expand_rate);
+ EXPECT_EQ(info.decoding_calls_to_silence_generator,
+ stats.decoding_calls_to_silence_generator);
+ EXPECT_EQ(info.decoding_calls_to_neteq, stats.decoding_calls_to_neteq);
+ EXPECT_EQ(info.decoding_normal, stats.decoding_normal);
+ EXPECT_EQ(info.decoding_plc, stats.decoding_plc);
+ EXPECT_EQ(info.decoding_cng, stats.decoding_cng);
+ EXPECT_EQ(info.decoding_plc_cng, stats.decoding_plc_cng);
+ EXPECT_EQ(info.decoding_muted_output, stats.decoding_muted_output);
+ EXPECT_EQ(info.capture_start_ntp_time_ms, stats.capture_start_ntp_time_ms);
+ }
+  // Verifies that |info|'s send/receive codec maps exactly mirror the codec
+  // lists previously applied through send_parameters_/recv_parameters_:
+  // same size, every payload id present, and matching RtpCodecParameters.
+  void VerifyVoiceSendRecvCodecs(const cricket::VoiceMediaInfo& info) const {
+    EXPECT_EQ(send_parameters_.codecs.size(), info.send_codecs.size());
+    for (const cricket::AudioCodec& codec : send_parameters_.codecs) {
+      ASSERT_EQ(info.send_codecs.count(codec.id), 1U);
+      EXPECT_EQ(info.send_codecs.find(codec.id)->second,
+                codec.ToCodecParameters());
+    }
+    EXPECT_EQ(recv_parameters_.codecs.size(), info.receive_codecs.size());
+    for (const cricket::AudioCodec& codec : recv_parameters_.codecs) {
+      ASSERT_EQ(info.receive_codecs.count(codec.id), 1U);
+      EXPECT_EQ(info.receive_codecs.find(codec.id)->second,
+                codec.ToCodecParameters());
+    }
+  }
+
+  // Test-only accessor: reports whether the high-pass filter is enabled in
+  // the engine's current AudioProcessing configuration.
+  bool IsHighPassFilterEnabled() {
+    return engine_->GetApmConfigForTest().high_pass_filter.enabled;
+  }
+
+ protected:
+  // Strict mocks fail the test on any call without a matching EXPECT_CALL.
+  StrictMock<webrtc::test::MockAudioDeviceModule> adm_;
+  rtc::scoped_refptr<StrictMock<webrtc::test::MockAudioProcessing>> apm_;
+  // References into the mocked AudioProcessing's sub-modules; presumably
+  // bound in the fixture constructor (not visible in this chunk).
+  webrtc::test::MockGainControl& apm_gc_;
+  webrtc::test::MockEchoCancellation& apm_ec_;
+  webrtc::test::MockNoiseSuppression& apm_ns_;
+  webrtc::test::MockVoiceDetection& apm_vd_;
+  StrictMock<MockTransmitMixer> transmit_mixer_;
+  webrtc::RtcEventLogNullImpl event_log_;
+  cricket::FakeCall call_;
+  cricket::FakeWebRtcVoiceEngine voe_;
+  std::unique_ptr<cricket::WebRtcVoiceEngine> engine_;
+  // NOTE(review): raw pointer; creation/ownership appears to happen in the
+  // Setup* helpers (not visible here) -- confirm before relying on lifetime.
+  cricket::VoiceMediaChannel* channel_ = nullptr;
+  cricket::AudioSendParameters send_parameters_;
+  cricket::AudioRecvParameters recv_parameters_;
+  FakeAudioSource fake_source_;
+  webrtc::AudioProcessing::Config apm_config_;
+
+ private:
+  webrtc::test::ScopedFieldTrials override_field_trials_;
+};
+
+// Tests that we can create and destroy a channel.
+TEST_F(WebRtcVoiceEngineTestFake, CreateChannel) {
+  EXPECT_TRUE(SetupChannel());
+}
+
+// Test that we can add a send stream and that it has the correct defaults.
+TEST_F(WebRtcVoiceEngineTestFake, CreateSendStream) {
+  EXPECT_TRUE(SetupChannel());
+  EXPECT_TRUE(
+      channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+  const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX);
+  EXPECT_EQ(kSsrcX, config.rtp.ssrc);
+  EXPECT_EQ("", config.rtp.c_name);
+  EXPECT_EQ(0u, config.rtp.extensions.size());
+  EXPECT_EQ(static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_),
+            config.send_transport);
+}
+
+// Test that we can add a receive stream and that it has the correct defaults.
+TEST_F(WebRtcVoiceEngineTestFake, CreateRecvStream) {
+  EXPECT_TRUE(SetupChannel());
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  const webrtc::AudioReceiveStream::Config& config =
+      GetRecvStreamConfig(kSsrcX);
+  EXPECT_EQ(kSsrcX, config.rtp.remote_ssrc);
+  EXPECT_EQ(0xFA17FA17, config.rtp.local_ssrc);
+  EXPECT_FALSE(config.rtp.transport_cc);
+  EXPECT_EQ(0u, config.rtp.extensions.size());
+  EXPECT_EQ(static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_),
+            config.rtcp_send_transport);
+  EXPECT_EQ("", config.sync_group);
+}
+
+// Test that every Opus entry in the engine's send codec list advertises
+// transport-cc feedback.
+TEST_F(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) {
+  const std::vector<cricket::AudioCodec>& codecs = engine_->send_codecs();
+  bool opus_found = false;
+  for (cricket::AudioCodec codec : codecs) {
+    if (codec.name == "opus") {
+      EXPECT_TRUE(HasTransportCc(codec));
+      opus_found = true;
+    }
+  }
+  EXPECT_TRUE(opus_found);
+}
+
+// Test that we set our inbound codecs properly, including changing PT.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecs) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kTelephoneEventCodec1);
+  parameters.codecs.push_back(kTelephoneEventCodec2);
+  parameters.codecs[0].id = 106; // collide with existing CN 32k
+  parameters.codecs[2].id = 126;
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+              (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                  {{0, {"PCMU", 8000, 1}},
+                   {106, {"ISAC", 16000, 1}},
+                   {126, {"telephone-event", 8000, 1}},
+                   {107, {"telephone-event", 32000, 1}}})));
+}
+
+// Test that we fail to set an unknown inbound codec.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(cricket::AudioCodec(127, "XYZ", 32000, 0, 1));
+  EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Test that we fail if we have duplicate types in the inbound list.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kCn16000Codec);
+  parameters.codecs[1].id = kIsacCodec.id;
+  EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
+// Test that we can decode OPUS without stereo parameters.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kOpusCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+              (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                  {{0, {"PCMU", 8000, 1}},
+                   {103, {"ISAC", 16000, 1}},
+                   {111, {"opus", 48000, 2}}})));
+}
+
+// Test that we can decode OPUS with stereo = 0.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[2].params["stereo"] = "0";
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+              (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                  {{0, {"PCMU", 8000, 1}},
+                   {103, {"ISAC", 16000, 1}},
+                   {111, {"opus", 48000, 2, {{"stereo", "0"}}}}})));
+}
+
+// Test that we can decode OPUS with stereo = 1.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[2].params["stereo"] = "1";
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+              (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                  {{0, {"PCMU", 8000, 1}},
+                   {103, {"ISAC", 16000, 1}},
+                   {111, {"opus", 48000, 2, {{"stereo", "1"}}}}})));
+}
+
+// Test that changes to recv codecs are applied to all streams.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kTelephoneEventCodec1);
+  parameters.codecs.push_back(kTelephoneEventCodec2);
+  parameters.codecs[0].id = 106; // collide with existing CN 32k
+  parameters.codecs[2].id = 126;
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  for (const auto& ssrc : {kSsrcX, kSsrcY}) {
+    EXPECT_TRUE(AddRecvStream(ssrc));
+    EXPECT_THAT(GetRecvStreamConfig(ssrc).decoder_map,
+                (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                    {{0, {"PCMU", 8000, 1}},
+                     {106, {"ISAC", 16000, 1}},
+                     {126, {"telephone-event", 8000, 1}},
+                     {107, {"telephone-event", 32000, 1}}})));
+  }
+}
+
+// Test that new recv codecs are applied to an already-created stream.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs[0].id = 106; // collide with existing CN 32k
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  const auto& dm = GetRecvStreamConfig(kSsrcX).decoder_map;
+  ASSERT_EQ(1, dm.count(106));
+  // NOTE(review): lowercase "isac" here vs "ISAC" above -- presumably
+  // SdpAudioFormat compares codec names case-insensitively; confirm.
+  EXPECT_EQ(webrtc::SdpAudioFormat("isac", 16000, 1), dm.at(106));
+}
+
+// Test that we can apply the same set of codecs again while playing.
+TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kCn16000Codec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  channel_->SetPlayout(true);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  // Remapping a payload type to a different codec should fail.
+  parameters.codecs[0] = kOpusCodec;
+  parameters.codecs[0].id = kIsacCodec.id;
+  EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(GetRecvStream(kSsrcX).started());
+}
+
+// Test that we can add a codec while playing.
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kCn16000Codec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  channel_->SetPlayout(true);
+
+  parameters.codecs.push_back(kOpusCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(GetRecvStream(kSsrcX).started());
+}
+
+// Test that we accept adding the same codec with a different payload type.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5847
+TEST_F(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  ++parameters.codecs[0].id;
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) {
+  EXPECT_TRUE(SetupSendStream());
+
+  // Test that when autobw is enabled, bitrate is kept as the default
+  // value. autobw is enabled for the following tests because the target
+  // bitrate is <= 0.
+
+  // ISAC, default bitrate == 32000.
+  TestMaxSendBandwidth(kIsacCodec, 0, true, 32000);
+
+  // PCMU, default bitrate == 64000.
+  TestMaxSendBandwidth(kPcmuCodec, -1, true, 64000);
+
+  // opus, default bitrate == 32000 in mono.
+  TestMaxSendBandwidth(kOpusCodec, -1, true, 32000);
+}
+
+// Test that max_bandwidth_bps caps the codec target bitrate for multi-rate
+// codecs (as caller).
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCaller) {
+  EXPECT_TRUE(SetupSendStream());
+
+  // ISAC, default bitrate == 32000.
+  TestMaxSendBandwidth(kIsacCodec, 16000, true, 16000);
+  // Rates above the codec maximum should be capped (to 32000 here).
+  TestMaxSendBandwidth(kIsacCodec, 100000, true, 32000);
+
+  // opus (multi-rate).
+  TestMaxSendBandwidth(kOpusCodec, 96000, true, 96000);
+  TestMaxSendBandwidth(kOpusCodec, 48000, true, 48000);
+  // Rates above the max (510000) should be capped.
+  TestMaxSendBandwidth(kOpusCodec, 600000, true, 510000);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) {
+  EXPECT_TRUE(SetupSendStream());
+
+  // Test that we can only set a maximum bitrate for a fixed-rate codec
+  // if it's bigger than the fixed rate.
+
+  // PCMU, fixed bitrate == 64000.
+  TestMaxSendBandwidth(kPcmuCodec, 0, true, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 1, false, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 128000, true, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 32000, false, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 64000, true, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 63999, false, 64000);
+  TestMaxSendBandwidth(kPcmuCodec, 64001, true, 64000);
+}
+
+// Test that max_bandwidth_bps set before the send stream is added still
+// applies to the stream's codec bitrate.
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) {
+  EXPECT_TRUE(SetupChannel());
+  const int kDesiredBitrate = 128000;
+  cricket::AudioSendParameters parameters;
+  parameters.codecs = engine_->send_codecs();
+  parameters.max_bandwidth_bps = kDesiredBitrate;
+  SetSendParameters(parameters);
+
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcX)));
+
+  EXPECT_EQ(kDesiredBitrate, GetCodecBitrate(kSsrcX));
+}
+
+// Test that bitrate cannot be set for CBR codecs.
+// Bitrate is ignored if it is higher than the fixed bitrate.
+// Bitrate less than the fixed bitrate is an error.
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthCbr) {
+  EXPECT_TRUE(SetupSendStream());
+
+  // PCMU, default bitrate == 64000.
+  SetSendParameters(send_parameters_);
+  EXPECT_EQ(64000, GetCodecBitrate(kSsrcX));
+
+  send_parameters_.max_bandwidth_bps = 128000;
+  SetSendParameters(send_parameters_);
+  EXPECT_EQ(64000, GetCodecBitrate(kSsrcX));
+
+  send_parameters_.max_bandwidth_bps = 128;
+  EXPECT_FALSE(channel_->SetSendParameters(send_parameters_));
+  EXPECT_EQ(64000, GetCodecBitrate(kSsrcX));
+}
+
+// Test that the per-stream bitrate limit and the global
+// bitrate limit both apply.
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxBitratePerStream) {
+  EXPECT_TRUE(SetupSendStream());
+
+  // opus, default bitrate == 32000.
+  SetAndExpectMaxBitrate(kOpusCodec, 0, 0, true, 32000);
+  SetAndExpectMaxBitrate(kOpusCodec, 48000, 0, true, 48000);
+  SetAndExpectMaxBitrate(kOpusCodec, 48000, 64000, true, 48000);
+  SetAndExpectMaxBitrate(kOpusCodec, 64000, 48000, true, 48000);
+
+  // CBR codecs allow both maximums to exceed the bitrate.
+  SetAndExpectMaxBitrate(kPcmuCodec, 0, 0, true, 64000);
+  SetAndExpectMaxBitrate(kPcmuCodec, 64001, 0, true, 64000);
+  SetAndExpectMaxBitrate(kPcmuCodec, 0, 64001, true, 64000);
+  SetAndExpectMaxBitrate(kPcmuCodec, 64001, 64001, true, 64000);
+
+  // CBR codecs don't allow per stream maximums to be too low.
+  SetAndExpectMaxBitrate(kPcmuCodec, 0, 63999, false, 64000);
+  SetAndExpectMaxBitrate(kPcmuCodec, 64001, 63999, false, 64000);
+}
+
+// Test that an attempt to set RtpParameters for a stream that does not exist
+// fails.
+TEST_F(WebRtcVoiceEngineTestFake, CannotSetMaxBitrateForNonexistentStream) {
+  EXPECT_TRUE(SetupChannel());
+  webrtc::RtpParameters nonexistent_parameters =
+      channel_->GetRtpSendParameters(kSsrcX);
+  EXPECT_EQ(0, nonexistent_parameters.encodings.size());
+
+  nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, nonexistent_parameters));
+}
+
+TEST_F(WebRtcVoiceEngineTestFake,
+       CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
+  // This test verifies that setting RtpParameters succeeds only if
+  // the structure contains exactly one encoding.
+  // TODO(skvlad): Update this test when we start supporting setting parameters
+  // for each encoding individually.
+
+  EXPECT_TRUE(SetupSendStream());
+  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  // Two or more encodings should result in failure.
+  parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters));
+  // Zero encodings should also fail.
+  parameters.encodings.clear();
+  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters));
+}
+
+// Changing the SSRC through RtpParameters is not allowed.
+TEST_F(WebRtcVoiceEngineTestFake, CannotSetSsrcInRtpSendParameters) {
+  EXPECT_TRUE(SetupSendStream());
+  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  parameters.encodings[0].ssrc = 0xdeadbeef;
+  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters));
+}
+
+// Test that a stream will not be sending if its encoding is made
+// inactive through SetRtpSendParameters.
+TEST_F(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) {
+  EXPECT_TRUE(SetupSendStream());
+  SetSend(true);
+  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
+  // Get current parameters and change "active" to false.
+  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  ASSERT_EQ(1u, parameters.encodings.size());
+  ASSERT_TRUE(parameters.encodings[0].active);
+  parameters.encodings[0].active = false;
+  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters));
+  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
+
+  // Now change it back to active and verify we resume sending.
+  parameters.encodings[0].active = true;
+  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters));
+  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
+}
+
+// Test that SetRtpSendParameters configures the correct encoding channel for
+// each SSRC.
+TEST_F(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) {
+  SetupForMultiSendStream();
+  // Create send streams.
+  for (uint32_t ssrc : kSsrcs4) {
+    EXPECT_TRUE(
+        channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc)));
+  }
+  // Configure one stream to be limited by the stream config, another to be
+  // limited by the global max, and the third one with no per-stream limit
+  // (still subject to the global limit).
+  SetGlobalMaxBitrate(kOpusCodec, 32000);
+  EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[0], 24000));
+  EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[1], 48000));
+  EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[2], -1));
+
+  EXPECT_EQ(24000, GetCodecBitrate(kSsrcs4[0]));
+  EXPECT_EQ(32000, GetCodecBitrate(kSsrcs4[1]));
+  EXPECT_EQ(32000, GetCodecBitrate(kSsrcs4[2]));
+
+  // Remove the global cap; the streams should switch to their respective
+  // maximums (or remain unchanged if there was no other limit on them.)
+  SetGlobalMaxBitrate(kOpusCodec, -1);
+  EXPECT_EQ(24000, GetCodecBitrate(kSsrcs4[0]));
+  EXPECT_EQ(48000, GetCodecBitrate(kSsrcs4[1]));
+  EXPECT_EQ(32000, GetCodecBitrate(kSsrcs4[2]));
+}
+
+// Test that GetRtpSendParameters returns the currently configured codecs.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  SetSendParameters(parameters);
+
+  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  ASSERT_EQ(2u, rtp_parameters.codecs.size());
+  EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]);
+  EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that GetRtpSendParameters returns an SSRC.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) {
+  EXPECT_TRUE(SetupSendStream());
+  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  ASSERT_EQ(1u, rtp_parameters.encodings.size());
+  EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  SetSendParameters(parameters);
+
+  webrtc::RtpParameters initial_params = channel_->GetRtpSendParameters(kSsrcX);
+
+  // We should be able to set the params we just got.
+  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, initial_params));
+
+  // ... And this shouldn't change the params returned by GetRtpSendParameters.
+  // NOTE(review): |new_params| is fetched but never asserted against -- the
+  // EXPECT_EQ below re-queries the channel instead. Dead local?
+  webrtc::RtpParameters new_params = channel_->GetRtpSendParameters(kSsrcX);
+  EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(kSsrcX));
+}
+
+// Test that max_bitrate_bps in send stream config gets updated correctly when
+// SetRtpSendParameters is called.
+TEST_F(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) {
+  webrtc::test::ScopedFieldTrials override_field_trials(
+      "WebRTC-Audio-SendSideBwe/Enabled/");
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters send_parameters;
+  send_parameters.codecs.push_back(kOpusCodec);
+  SetSendParameters(send_parameters);
+
+  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  // Expect empty on parameters.encodings[0].max_bitrate_bps;
+  EXPECT_FALSE(rtp_parameters.encodings[0].max_bitrate_bps);
+
+  constexpr int kMaxBitrateBps = 6000;
+  rtp_parameters.encodings[0].max_bitrate_bps = kMaxBitrateBps;
+  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters));
+
+  const int max_bitrate = GetSendStreamConfig(kSsrcX).max_bitrate_bps;
+  EXPECT_EQ(max_bitrate, kMaxBitrateBps);
+}
+
+// Test that GetRtpReceiveParameters returns the currently configured codecs.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  webrtc::RtpParameters rtp_parameters =
+      channel_->GetRtpReceiveParameters(kSsrcX);
+  ASSERT_EQ(2u, rtp_parameters.codecs.size());
+  EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]);
+  EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that GetRtpReceiveParameters returns an SSRC.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) {
+  EXPECT_TRUE(SetupRecvStream());
+  webrtc::RtpParameters rtp_parameters =
+      channel_->GetRtpReceiveParameters(kSsrcX);
+  ASSERT_EQ(1u, rtp_parameters.encodings.size());
+  EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) {
+  EXPECT_TRUE(SetupRecvStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  webrtc::RtpParameters initial_params =
+      channel_->GetRtpReceiveParameters(kSsrcX);
+
+  // We should be able to set the params we just got.
+  EXPECT_TRUE(channel_->SetRtpReceiveParameters(kSsrcX, initial_params));
+
+  // ... And this shouldn't change the params returned by
+  // GetRtpReceiveParameters.
+  // NOTE(review): |new_params| is fetched but never asserted against -- the
+  // EXPECT_EQ below re-queries the channel instead. Dead local?
+  webrtc::RtpParameters new_params = channel_->GetRtpReceiveParameters(kSsrcX);
+  EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(kSsrcX));
+}
+
+// Test that GetRtpReceiveParameters returns parameters correctly when SSRCs
+// aren't signaled. It should return an empty "RtpEncodingParameters" when
+// configured to receive an unsignaled stream and no packets have been received
+// yet, and start returning the SSRC once a packet has been received.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) {
+  ASSERT_TRUE(SetupChannel());
+  // Call necessary methods to configure receiving a default stream as
+  // soon as it arrives.
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+  // Call GetRtpReceiveParameters before configured to receive an unsignaled
+  // stream. Should return nothing.
+  EXPECT_EQ(webrtc::RtpParameters(), channel_->GetRtpReceiveParameters(0));
+
+  // Set a sink for an unsignaled stream.
+  std::unique_ptr<FakeAudioSink> fake_sink(new FakeAudioSink());
+  // Value of "0" means "unsignaled stream".
+  channel_->SetRawAudioSink(0, std::move(fake_sink));
+
+  // Call GetRtpReceiveParameters before the SSRC is known. Value of "0"
+  // in this method means "unsignaled stream".
+  webrtc::RtpParameters rtp_parameters = channel_->GetRtpReceiveParameters(0);
+  ASSERT_EQ(1u, rtp_parameters.encodings.size());
+  EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);
+
+  // Receive PCMU packet (SSRC=1).
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+
+  // The |ssrc| member should still be unset.
+  rtp_parameters = channel_->GetRtpReceiveParameters(0);
+  ASSERT_EQ(1u, rtp_parameters.encodings.size());
+  EXPECT_FALSE(rtp_parameters.encodings[0].ssrc);
+}
+
+// Test that we apply codecs properly.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecs) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kCn8000Codec);
+  parameters.codecs[0].id = 96;
+  parameters.codecs[0].bitrate = 22000;
+  SetSendParameters(parameters);
+  const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+  EXPECT_EQ(96, send_codec_spec.payload_type);
+  EXPECT_EQ(22000, send_codec_spec.target_bitrate_bps);
+  EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
+  // The selected format must not be an 8 kHz one (guards against the
+  // CN8000 entry being chosen as the send codec).
+  EXPECT_NE(send_codec_spec.format.clockrate_hz, 8000);
+  EXPECT_EQ(rtc::nullopt, send_codec_spec.cng_payload_type);
+  EXPECT_FALSE(channel_->CanInsertDtmf());
+}
+
+// Test that WebRtcVoiceEngine reconfigures, rather than recreates its
+// AudioSendStream.
+TEST_F(WebRtcVoiceEngineTestFake, DontRecreateSendStream) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  parameters.codecs.push_back(kCn8000Codec);
+  parameters.codecs[0].id = 96;
+  parameters.codecs[0].bitrate = 48000;
+  const int initial_num = call_.GetNumCreatedSendStreams();
+  SetSendParameters(parameters);
+  EXPECT_EQ(initial_num, call_.GetNumCreatedSendStreams());
+  // Calling SetSendCodec again with same codec which is already set.
+  // In this case media channel shouldn't send codec to VoE.
+  SetSendParameters(parameters);
+  EXPECT_EQ(initial_num, call_.GetNumCreatedSendStreams());
+}
+
+// TODO(ossu): Revisit if these tests need to be here, now that these kinds of
+// tests should be available in AudioEncoderOpusTest.
+
+// Test that if clockrate is not 48000 for opus, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBadClockrate) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].clockrate = 50000;
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that if channels=0 for opus, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0ChannelsNoStereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].channels = 0;
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that if channels=0 for opus, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0Channels1Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].channels = 0;
+  parameters.codecs[0].params["stereo"] = "1";
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that if channel is 1 for opus and there's no stereo, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpus1ChannelNoStereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].channels = 1;
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that if channel is 1 for opus and stereo=0, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel0Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].channels = 1;
+  parameters.codecs[0].params["stereo"] = "0";
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that if channel is 1 for opus and stereo=1, we fail.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel1Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].channels = 1;
+  parameters.codecs[0].params["stereo"] = "1";
+  EXPECT_FALSE(channel_->SetSendParameters(parameters));
+}
+
+// Test that with bitrate=0 and no stereo, bitrate is 32000.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0BitrateNoStereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 32000);
+}
+
+// Test that with bitrate=0 and stereo=0, bitrate is 32000.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate0Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].params["stereo"] = "0";
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 32000);
+}
+
+// Test that with bitrate=invalid and stereo=0, bitrate is 32000.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate0Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].params["stereo"] = "0";
+  // bitrate that's out of the range between 6000 and 510000 will be clamped.
+  parameters.codecs[0].bitrate = 5999;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 6000);
+
+  parameters.codecs[0].bitrate = 510001;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 510000);
+}
+
+// Test that with bitrate=0 and stereo=1, bitrate is 64000.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate1Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 0;
+  parameters.codecs[0].params["stereo"] = "1";
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 64000);
+}
+
+// Test that with bitrate=invalid and stereo=1, bitrate is 64000.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate1Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].params["stereo"] = "1";
+  // bitrate that's out of the range between 6000 and 510000 will be clamped.
+  parameters.codecs[0].bitrate = 5999;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 6000);
+
+  parameters.codecs[0].bitrate = 510001;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 510000);
+}
+
+// Test that with bitrate=N and stereo unset, bitrate is N.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoStereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 96000;
+  SetSendParameters(parameters);
+  // Unlike the bitrate-only checks above, verify the full send codec spec.
+  const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+  EXPECT_EQ(111, spec.payload_type);
+  EXPECT_EQ(96000, spec.target_bitrate_bps);
+  EXPECT_EQ("opus", spec.format.name);
+  EXPECT_EQ(2, spec.format.num_channels);
+  EXPECT_EQ(48000, spec.format.clockrate_hz);
+}
+
+// Test that with bitrate=N and stereo=0, bitrate is N.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate0Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 30000;
+  parameters.codecs[0].params["stereo"] = "0";
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 30000);
+}
+
+// Test that with bitrate=N and without any parameters, bitrate is N.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoParameters) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 30000;
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 30000);
+}
+
+// Test that with bitrate=N and stereo=1, bitrate is N.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate1Stereo) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters parameters;
+  parameters.codecs.push_back(kOpusCodec);
+  parameters.codecs[0].bitrate = 30000;
+  parameters.codecs[0].params["stereo"] = "1";
+  SetSendParameters(parameters);
+  CheckSendCodecBitrate(kSsrcX, "opus", 30000);
+}
+
// Arguments come in (string value, expected bps) pairs for min, start and
// max bitrate; all three explicitly set in kbps should be applied verbatim.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsWithBitrates) {
  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
                                     200000);
}
+
// A very large max bitrate (10 Mbps) should be accepted as-is while min and
// start stay at their defaults (0 / unset).
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsWithHighMaxBitrate) {
  SetSendCodecsShouldWorkForBitrates("", 0, "", -1, "10000", 10000000);
}
+
// With no bitrates specified at all, min defaults to 0 and start/max stay
// unset (-1).
TEST_F(WebRtcVoiceEngineTestFake,
       SetSendCodecsWithoutBitratesUsesCorrectDefaults) {
  SetSendCodecsShouldWorkForBitrates("", 0, "", -1, "", -1);
}
+
// Negative min/start bitrates must be clamped to their defaults (0 / -1)
// rather than passed through.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCapsMinAndStartBitrate) {
  SetSendCodecsShouldWorkForBitrates("-1", 0, "-100", -1, "", -1);
}
+
// Setting only max_bandwidth_bps afterwards must update the BWE max without
// disturbing the previously configured min/start bitrates.
TEST_F(WebRtcVoiceEngineTestFake,
       SetMaxSendBandwidthForAudioDoesntAffectBwe) {
  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
                                     200000);
  send_parameters_.max_bandwidth_bps = 100000;
  SetSendParameters(send_parameters_);
  // NOTE: the call-level max stays at 200000 — audio's max_bandwidth_bps is
  // not expected to lower the BWE config (hence "DoesntAffectBwe").
  EXPECT_EQ(100000, call_.GetConfig().bitrate_config.min_bitrate_bps)
      << "Setting max bitrate should keep previous min bitrate.";
  EXPECT_EQ(-1, call_.GetConfig().bitrate_config.start_bitrate_bps)
      << "Setting max bitrate should not reset start bitrate.";
  EXPECT_EQ(200000, call_.GetConfig().bitrate_config.max_bitrate_bps);
}
+
// Test that we can enable NACK with opus as caller.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCaller) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  // NACK is off (history == 0) until the codec with the nack feedback param
  // is applied.
  EXPECT_EQ(0, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  SetSendParameters(parameters);
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
}
+
// Test that we can enable NACK with opus as callee.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) {
  EXPECT_TRUE(SetupRecvStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  EXPECT_EQ(0, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  SetSendParameters(parameters);
  // NACK should be enabled even with no send stream.
  EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);

  // A send stream added afterwards must pick up the NACK setting too.
  EXPECT_TRUE(channel_->AddSendStream(
      cricket::StreamParams::CreateLegacy(kSsrcX)));
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
}
+
// Test that we can enable NACK on receive streams.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) {
  EXPECT_TRUE(SetupSendStream());
  EXPECT_TRUE(AddRecvStream(kSsrcY));
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  // Both directions start with NACK disabled...
  EXPECT_EQ(0, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  EXPECT_EQ(0, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms);
  SetSendParameters(parameters);
  // ...and applying the send codec enables it on send AND recv streams.
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms);
}
+
// Test that we can disable NACK.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecDisableNack) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  SetSendParameters(parameters);
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);

  // Re-apply the same codec without the nack feedback param: history must
  // drop back to 0.
  parameters.codecs.clear();
  parameters.codecs.push_back(kOpusCodec);
  SetSendParameters(parameters);
  EXPECT_EQ(0, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
}
+
// Test that we can disable NACK on receive streams.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) {
  EXPECT_TRUE(SetupSendStream());
  EXPECT_TRUE(AddRecvStream(kSsrcY));
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  SetSendParameters(parameters);
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms);

  // Dropping the nack feedback param must disable NACK on both the send and
  // the receive stream.
  parameters.codecs.clear();
  parameters.codecs.push_back(kOpusCodec);
  SetSendParameters(parameters);
  EXPECT_EQ(0, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
  EXPECT_EQ(0, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms);
}
+
// Test that NACK is enabled on a new receive stream.
TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs[0].AddFeedbackParam(
      cricket::FeedbackParam(cricket::kRtcpFbParamNack,
                             cricket::kParamValueEmpty));
  SetSendParameters(parameters);
  EXPECT_EQ(kRtpHistoryMs, GetSendStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);

  // Receive streams created after NACK was negotiated must inherit it.
  EXPECT_TRUE(AddRecvStream(kSsrcY));
  EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms);
  EXPECT_TRUE(AddRecvStream(kSsrcZ));
  EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcZ).rtp.nack.rtp_history_ms);
}
+
// Transport-cc on the receive stream should track whether the negotiated
// send codecs carry the transport-cc feedback parameter.
TEST_F(WebRtcVoiceEngineTestFake, TransportCcCanBeEnabledAndDisabled) {
  EXPECT_TRUE(SetupChannel());
  cricket::AudioSendParameters send_parameters;
  send_parameters.codecs.push_back(kOpusCodec);
  // kOpusCodec here has no feedback params, so transport-cc stays disabled.
  EXPECT_TRUE(send_parameters.codecs[0].feedback_params.params().empty());
  SetSendParameters(send_parameters);

  cricket::AudioRecvParameters recv_parameters;
  recv_parameters.codecs.push_back(kIsacCodec);
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters));
  EXPECT_TRUE(AddRecvStream(kSsrcX));
  ASSERT_TRUE(call_.GetAudioReceiveStream(kSsrcX) != nullptr);
  EXPECT_FALSE(
      call_.GetAudioReceiveStream(kSsrcX)->GetConfig().rtp.transport_cc);

  // The engine's own send codec list carries transport-cc feedback; applying
  // it must flip the receive stream's transport_cc flag on.
  send_parameters.codecs = engine_->send_codecs();
  SetSendParameters(send_parameters);
  ASSERT_TRUE(call_.GetAudioReceiveStream(kSsrcX) != nullptr);
  EXPECT_TRUE(
      call_.GetAudioReceiveStream(kSsrcX)->GetConfig().rtp.transport_cc);
}
+
// Test that we can switch back and forth between Opus and ISAC with CN.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) {
  EXPECT_TRUE(SetupSendStream());

  cricket::AudioSendParameters opus_parameters;
  opus_parameters.codecs.push_back(kOpusCodec);
  SetSendParameters(opus_parameters);
  {
    const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_EQ(111, spec.payload_type);
    EXPECT_STRCASEEQ("opus", spec.format.name.c_str());
  }

  // Switch to ISAC; the first codec in the list wins even though Opus is
  // still present.
  cricket::AudioSendParameters isac_parameters;
  isac_parameters.codecs.push_back(kIsacCodec);
  isac_parameters.codecs.push_back(kCn16000Codec);
  isac_parameters.codecs.push_back(kOpusCodec);
  SetSendParameters(isac_parameters);
  {
    const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_EQ(103, spec.payload_type);
    EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str());
  }

  // And back to Opus again.
  SetSendParameters(opus_parameters);
  {
    const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_EQ(111, spec.payload_type);
    EXPECT_STRCASEEQ("opus", spec.format.name.c_str());
  }
}
+
+// Test that we handle various ways of specifying bitrate.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) {
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kIsacCodec); // bitrate == 32000
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(103, spec.payload_type);
+ EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str());
+ EXPECT_EQ(32000, spec.target_bitrate_bps);
+ }
+
+ parameters.codecs[0].bitrate = 0; // bitrate == default
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(103, spec.payload_type);
+ EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str());
+ EXPECT_EQ(32000, spec.target_bitrate_bps);
+ }
+ parameters.codecs[0].bitrate = 28000; // bitrate == 28000
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(103, spec.payload_type);
+ EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str());
+ EXPECT_EQ(28000, spec.target_bitrate_bps);
+ }
+
+ parameters.codecs[0] = kPcmuCodec; // bitrate == 64000
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(0, spec.payload_type);
+ EXPECT_STRCASEEQ("PCMU", spec.format.name.c_str());
+ EXPECT_EQ(64000, spec.target_bitrate_bps);
+ }
+
+ parameters.codecs[0].bitrate = 0; // bitrate == default
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(0, spec.payload_type);
+ EXPECT_STREQ("PCMU", spec.format.name.c_str());
+ EXPECT_EQ(64000, spec.target_bitrate_bps);
+ }
+
+ parameters.codecs[0] = kOpusCodec;
+ parameters.codecs[0].bitrate = 0; // bitrate == default
+ SetSendParameters(parameters);
+ {
+ const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(111, spec.payload_type);
+ EXPECT_STREQ("opus", spec.format.name.c_str());
+ EXPECT_EQ(32000, spec.target_bitrate_bps);
+ }
+}
+
// Test that we fail if no codecs are specified.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsNoCodecs) {
  EXPECT_TRUE(SetupSendStream());
  // An empty codec list is invalid; SetSendParameters must return false.
  cricket::AudioSendParameters parameters;
  EXPECT_FALSE(channel_->SetSendParameters(parameters));
}
+
// Test that we can set send codecs even with telephone-event codec as the first
// one on the list.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kTelephoneEventCodec1);
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kPcmuCodec);
  parameters.codecs[0].id = 98;  // DTMF
  parameters.codecs[1].id = 96;
  SetSendParameters(parameters);
  // telephone-event is not a media codec, so ISAC (the first real audio
  // codec) must be selected for sending, with DTMF still usable.
  const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
  EXPECT_EQ(96, spec.payload_type);
  EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str());
  EXPECT_TRUE(channel_->CanInsertDtmf());
}
+
// Test that payload type range is limited for telephone-event codec.
// Valid dynamic payload type ids are 0..127; 128 and -1 must be rejected,
// and a rejected SetSendParameters also disables DTMF insertion.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kTelephoneEventCodec2);
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs[0].id = 0;  // DTMF
  parameters.codecs[1].id = 96;
  SetSendParameters(parameters);
  EXPECT_TRUE(channel_->CanInsertDtmf());
  parameters.codecs[0].id = 128;  // DTMF
  EXPECT_FALSE(channel_->SetSendParameters(parameters));
  EXPECT_FALSE(channel_->CanInsertDtmf());
  parameters.codecs[0].id = 127;  // upper bound of the valid range
  SetSendParameters(parameters);
  EXPECT_TRUE(channel_->CanInsertDtmf());
  parameters.codecs[0].id = -1;  // DTMF
  EXPECT_FALSE(channel_->SetSendParameters(parameters));
  EXPECT_FALSE(channel_->CanInsertDtmf());
}
+
// Test that we can set send codecs even with CN codec as the first
// one on the list.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kPcmuCodec);
  parameters.codecs[0].id = 98;  // wideband CN
  parameters.codecs[1].id = 96;
  SetSendParameters(parameters);
  // CN is a pseudo-codec: ISAC is chosen for sending and CN 16k (matching
  // ISAC's clock rate) becomes the CNG payload type.
  const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
  EXPECT_EQ(96, send_codec_spec.payload_type);
  EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
  EXPECT_EQ(98, send_codec_spec.cng_payload_type);
}
+
// Test that we set VAD and DTMF types correctly as caller.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kPcmuCodec);
  // TODO(juberti): cn 32000
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs.push_back(kCn8000Codec);
  parameters.codecs.push_back(kTelephoneEventCodec1);
  parameters.codecs[0].id = 96;
  parameters.codecs[2].id = 97;  // wideband CN
  parameters.codecs[4].id = 98;  // DTMF
  SetSendParameters(parameters);
  // ISAC is the send codec; the 16k CN (matching ISAC's clock rate) supplies
  // the CNG payload type, and DTMF insertion is available.
  const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
  EXPECT_EQ(96, send_codec_spec.payload_type);
  EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
  EXPECT_EQ(1, send_codec_spec.format.num_channels);
  EXPECT_EQ(97, send_codec_spec.cng_payload_type);
  EXPECT_TRUE(channel_->CanInsertDtmf());
}
+
// Test that we set VAD and DTMF types correctly as callee.
// Same as the caller variant, except the codecs are applied BEFORE the send
// stream exists; the stream created afterwards must pick them up.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) {
  EXPECT_TRUE(SetupChannel());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kPcmuCodec);
  // TODO(juberti): cn 32000
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs.push_back(kCn8000Codec);
  parameters.codecs.push_back(kTelephoneEventCodec2);
  parameters.codecs[0].id = 96;
  parameters.codecs[2].id = 97;  // wideband CN
  parameters.codecs[4].id = 98;  // DTMF
  SetSendParameters(parameters);
  EXPECT_TRUE(channel_->AddSendStream(
      cricket::StreamParams::CreateLegacy(kSsrcX)));

  const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
  EXPECT_EQ(96, send_codec_spec.payload_type);
  EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
  EXPECT_EQ(1, send_codec_spec.format.num_channels);
  EXPECT_EQ(97, send_codec_spec.cng_payload_type);
  EXPECT_TRUE(channel_->CanInsertDtmf());
}
+
// Test that we only apply VAD if we have a CN codec that matches the
// send codec clockrate.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  // Set ISAC(16K) and CN(16K). VAD should be activated.
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs[1].id = 97;
  SetSendParameters(parameters);
  {
    const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
    EXPECT_EQ(1, send_codec_spec.format.num_channels);
    EXPECT_EQ(97, send_codec_spec.cng_payload_type);
  }
  // Set PCMU(8K) and CN(16K). VAD should not be activated.
  parameters.codecs[0] = kPcmuCodec;
  SetSendParameters(parameters);
  {
    const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str());
    // No CNG payload type at all when the clock rates mismatch.
    EXPECT_EQ(rtc::nullopt, send_codec_spec.cng_payload_type);
  }
  // Set PCMU(8K) and CN(8K). VAD should be activated.
  parameters.codecs[1] = kCn8000Codec;
  SetSendParameters(parameters);
  {
    const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str());
    EXPECT_EQ(1, send_codec_spec.format.num_channels);
    // 13 is kCn8000Codec's payload type.
    EXPECT_EQ(13, send_codec_spec.cng_payload_type);
  }
  // Set ISAC(16K) and CN(8K). VAD should not be activated.
  parameters.codecs[0] = kIsacCodec;
  SetSendParameters(parameters);
  {
    const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
    EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
    EXPECT_EQ(rtc::nullopt, send_codec_spec.cng_payload_type);
  }
}
+
// Test that we perform case-insensitive matching of codec names.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kPcmuCodec);
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs.push_back(kCn8000Codec);
  parameters.codecs.push_back(kTelephoneEventCodec1);
  // Mixed-case name must still match the engine's "ISAC".
  parameters.codecs[0].name = "iSaC";
  parameters.codecs[0].id = 96;
  parameters.codecs[2].id = 97;  // wideband CN
  parameters.codecs[4].id = 98;  // DTMF
  SetSendParameters(parameters);
  const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
  EXPECT_EQ(96, send_codec_spec.payload_type);
  EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
  EXPECT_EQ(1, send_codec_spec.format.num_channels);
  EXPECT_EQ(97, send_codec_spec.cng_payload_type);
  EXPECT_TRUE(channel_->CanInsertDtmf());
}
+
// Fixture variant that runs with the send-side BWE field trial enabled.
class WebRtcVoiceEngineWithSendSideBweTest : public WebRtcVoiceEngineTestFake {
 public:
  WebRtcVoiceEngineWithSendSideBweTest()
      : WebRtcVoiceEngineTestFake("WebRTC-Audio-SendSideBwe/Enabled/") {}
};
+
// With send-side BWE enabled, the engine must advertise the transport
// sequence number header extension with its default id.
TEST_F(WebRtcVoiceEngineWithSendSideBweTest,
       SupportsTransportSequenceNumberHeaderExtension) {
  cricket::RtpCapabilities capabilities = engine_->GetCapabilities();
  ASSERT_FALSE(capabilities.header_extensions.empty());
  for (const webrtc::RtpExtension& extension : capabilities.header_extensions) {
    if (extension.uri == webrtc::RtpExtension::kTransportSequenceNumberUri) {
      EXPECT_EQ(webrtc::RtpExtension::kTransportSequenceNumberDefaultId,
                extension.id);
      return;  // Found it; nothing else to check.
    }
  }
  FAIL() << "Transport sequence number extension not in header-extension list.";
}
+
// Test support for audio level header extension (send direction).
TEST_F(WebRtcVoiceEngineTestFake, SendAudioLevelHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(webrtc::RtpExtension::kAudioLevelUri);
}
// Test support for audio level header extension (receive direction).
TEST_F(WebRtcVoiceEngineTestFake, RecvAudioLevelHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(webrtc::RtpExtension::kAudioLevelUri);
}
+
// Test support for transport sequence number header extension (send side).
TEST_F(WebRtcVoiceEngineTestFake, SendTransportSequenceNumberHeaderExtensions) {
  TestSetSendRtpHeaderExtensions(
      webrtc::RtpExtension::kTransportSequenceNumberUri);
}
// Test support for transport sequence number header extension (receive side).
TEST_F(WebRtcVoiceEngineTestFake, RecvTransportSequenceNumberHeaderExtensions) {
  TestSetRecvRtpHeaderExtensions(
      webrtc::RtpExtension::kTransportSequenceNumberUri);
}
+
// Test that we can create a channel and start sending on it.
TEST_F(WebRtcVoiceEngineTestFake, Send) {
  EXPECT_TRUE(SetupSendStream());
  SetSendParameters(send_parameters_);
  // Toggling SetSend must be reflected directly in the stream's send state.
  SetSend(true);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
  SetSend(false);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
}
+
// Test that a channel will send if and only if it has a source and is enabled
// for sending.
TEST_F(WebRtcVoiceEngineTestFake, SendStateWithAndWithoutSource) {
  EXPECT_TRUE(SetupSendStream());
  SetSendParameters(send_parameters_);
  SetAudioSend(kSsrcX, true, nullptr);
  SetSend(true);
  // Enabled but no source: not sending.
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
  // Enabled with a source: sending.
  SetAudioSend(kSsrcX, true, &fake_source_);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
  // Source removed again: not sending.
  SetAudioSend(kSsrcX, true, nullptr);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
}
+
// Test that a channel is muted/unmuted.
TEST_F(WebRtcVoiceEngineTestFake, SendStateMuteUnmute) {
  EXPECT_TRUE(SetupSendStream());
  SetSendParameters(send_parameters_);
  EXPECT_FALSE(GetSendStream(kSsrcX).muted());
  // The second SetAudioSend argument is "enable"; false mutes the stream.
  SetAudioSend(kSsrcX, true, nullptr);
  EXPECT_FALSE(GetSendStream(kSsrcX).muted());
  SetAudioSend(kSsrcX, false, nullptr);
  EXPECT_TRUE(GetSendStream(kSsrcX).muted());
}
+
// Test that SetSendParameters() does not alter a stream's send state.
TEST_F(WebRtcVoiceEngineTestFake, SendStateWhenStreamsAreRecreated) {
  EXPECT_TRUE(SetupSendStream());
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());

  // Turn on sending.
  SetSend(true);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());

  // Changing RTP header extensions will recreate the AudioSendStream;
  // the recreated stream must still be sending.
  send_parameters_.extensions.push_back(
      webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 12));
  SetSendParameters(send_parameters_);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());

  // Turn off sending.
  SetSend(false);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());

  // Changing RTP header extensions will recreate the AudioSendStream;
  // the recreated stream must still be stopped.
  send_parameters_.extensions.clear();
  SetSendParameters(send_parameters_);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
}
+
// Test that we can create a channel and start playing out on it.
TEST_F(WebRtcVoiceEngineTestFake, Playout) {
  EXPECT_TRUE(SetupRecvStream());
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
  // SetPlayout must start/stop the underlying receive stream.
  channel_->SetPlayout(true);
  EXPECT_TRUE(GetRecvStream(kSsrcX).started());
  channel_->SetPlayout(false);
  EXPECT_FALSE(GetRecvStream(kSsrcX).started());
}
+
// Test that we can add and remove send streams.
TEST_F(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) {
  SetupForMultiSendStream();

  // Set the global state for sending.
  SetSend(true);

  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(ssrc)));
    SetAudioSend(ssrc, true, &fake_source_);
    // Verify that we are in a sending state for all the created streams.
    EXPECT_TRUE(GetSendStream(ssrc).IsSending());
  }
  EXPECT_EQ(arraysize(kSsrcs4), call_.GetAudioSendStreams().size());

  // Delete the send streams.
  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(channel_->RemoveSendStream(ssrc));
    EXPECT_FALSE(call_.GetAudioSendStream(ssrc));
    // Removing the same stream twice must fail.
    EXPECT_FALSE(channel_->RemoveSendStream(ssrc));
  }
  EXPECT_EQ(0u, call_.GetAudioSendStreams().size());
}
+
// Test SetSendCodecs correctly configure the codecs in all send streams.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) {
  SetupForMultiSendStream();

  // Create send streams.
  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(ssrc)));
  }

  cricket::AudioSendParameters parameters;
  // Set ISAC(16K) and CN(16K). VAD should be activated.
  parameters.codecs.push_back(kIsacCodec);
  parameters.codecs.push_back(kCn16000Codec);
  parameters.codecs[1].id = 97;
  SetSendParameters(parameters);

  // Verify ISAC and VAD are correctly configured on all send channels.
  for (uint32_t ssrc : kSsrcs4) {
    ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr);
    const auto& send_codec_spec =
        *call_.GetAudioSendStream(ssrc)->GetConfig().send_codec_spec;
    EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str());
    EXPECT_EQ(1, send_codec_spec.format.num_channels);
    EXPECT_EQ(97, send_codec_spec.cng_payload_type);
  }

  // Change to PCMU(8K) and CN(16K). The clock-rate mismatch means no VAD
  // (no CNG payload type) on any stream.
  parameters.codecs[0] = kPcmuCodec;
  SetSendParameters(parameters);
  for (uint32_t ssrc : kSsrcs4) {
    ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr);
    const auto& send_codec_spec =
        *call_.GetAudioSendStream(ssrc)->GetConfig().send_codec_spec;
    EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str());
    EXPECT_EQ(rtc::nullopt, send_codec_spec.cng_payload_type);
  }
}
+
// Test we can SetSend on all send streams correctly.
TEST_F(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) {
  SetupForMultiSendStream();

  // Create the send channels; they should be in a "not sending" state.
  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(ssrc)));
    SetAudioSend(ssrc, true, &fake_source_);
    EXPECT_FALSE(GetSendStream(ssrc).IsSending());
  }

  // Set the global state for starting sending.
  SetSend(true);
  for (uint32_t ssrc : kSsrcs4) {
    // Verify that we are in a sending state for all the send streams.
    EXPECT_TRUE(GetSendStream(ssrc).IsSending());
  }

  // Set the global state for stopping sending.
  SetSend(false);
  for (uint32_t ssrc : kSsrcs4) {
    // Verify that we are in a stop state for all the send streams.
    EXPECT_FALSE(GetSendStream(ssrc).IsSending());
  }
}
+
// Test we can set the correct statistics on all send streams.
TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
  SetupForMultiSendStream();

  // Create send streams.
  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(ssrc)));
  }

  // Create a receive stream to check that none of the send streams end up in
  // the receive stream stats.
  EXPECT_TRUE(AddRecvStream(kSsrcY));

  // We need send codec to be set to get all stats.
  SetSendParameters(send_parameters_);
  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
  SetAudioSendStreamStats();

  // Check stats for the added streams.
  {
    cricket::VoiceMediaInfo info;
    EXPECT_EQ(true, channel_->GetStats(&info));

    // We have added 4 send streams. We should see empty stats for all.
    EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
    for (const auto& sender : info.senders) {
      VerifyVoiceSenderInfo(sender, false);
    }
    VerifyVoiceSendRecvCodecs(info);

    // We have added one receive stream. We should see empty stats.
    EXPECT_EQ(info.receivers.size(), 1u);
    EXPECT_EQ(info.receivers[0].ssrc(), 0);
  }

  // Remove the kSsrcY stream. No receiver stats.
  {
    cricket::VoiceMediaInfo info;
    EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY));
    EXPECT_EQ(true, channel_->GetStats(&info));
    EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
    EXPECT_EQ(0u, info.receivers.size());
  }

  // Deliver a new packet - a default receive stream should be created and we
  // should see stats again.
  {
    cricket::VoiceMediaInfo info;
    DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
    SetAudioReceiveStreamStats();
    EXPECT_EQ(true, channel_->GetStats(&info));
    EXPECT_EQ(static_cast<size_t>(arraysize(kSsrcs4)), info.senders.size());
    EXPECT_EQ(1u, info.receivers.size());
    VerifyVoiceReceiverInfo(info.receivers[0]);
    VerifyVoiceSendRecvCodecs(info);
  }
}
+
// Test that we can add and remove receive streams, and do proper send/playout.
// We can receive on multiple streams while sending one stream.
TEST_F(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) {
  EXPECT_TRUE(SetupSendStream());

  // Start playout without a receive stream.
  SetSendParameters(send_parameters_);
  channel_->SetPlayout(true);

  // Adding another stream should enable playout on the new stream only.
  EXPECT_TRUE(AddRecvStream(kSsrcY));
  SetSend(true);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());

  // Make sure only the new stream is played out.
  EXPECT_TRUE(GetRecvStream(kSsrcY).started());

  // Adding yet another stream should have stream 2 and 3 enabled for playout.
  EXPECT_TRUE(AddRecvStream(kSsrcZ));
  EXPECT_TRUE(GetRecvStream(kSsrcY).started());
  EXPECT_TRUE(GetRecvStream(kSsrcZ).started());

  // Stop sending; playout of the recv streams is unaffected by send state.
  SetSend(false);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());

  // Stop playout.
  channel_->SetPlayout(false);
  EXPECT_FALSE(GetRecvStream(kSsrcY).started());
  EXPECT_FALSE(GetRecvStream(kSsrcZ).started());

  // Restart playout and make sure recv streams are played out.
  channel_->SetPlayout(true);
  EXPECT_TRUE(GetRecvStream(kSsrcY).started());
  EXPECT_TRUE(GetRecvStream(kSsrcZ).started());

  // Now remove the recv streams.
  EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcZ));
  EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY));
}
+
// Test that we can create a channel configured for Codian bridges,
// and start sending on it.
TEST_F(WebRtcVoiceEngineTestFake, CodianSend) {
  EXPECT_TRUE(SetupSendStream());
  send_parameters_.options.adjust_agc_delta = -10;
  // adjust_agc_delta of -10 is expected to shift the AGC target to 11 dbfs;
  // the expectation fires twice because the options are applied on both
  // SetSendParameters and the SetSend toggles below.
  EXPECT_CALL(apm_gc_,
              set_target_level_dbfs(11)).Times(2).WillRepeatedly(Return(0));
  SetSendParameters(send_parameters_);
  SetSend(true);
  EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
  SetSend(false);
  EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
}
+
// Verify that tx AGC options (target level, compression gain, limiter) are
// forwarded to the gain-control component of the audio processing module.
TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
  EXPECT_TRUE(SetupSendStream());
  EXPECT_CALL(adm_,
              BuiltInAGCIsAvailable()).Times(2).WillRepeatedly(Return(false));
  EXPECT_CALL(adm_, SetAGC(true)).Times(2).WillRepeatedly(Return(0));
  // NOTE(review): Times(2) with a single WillOnce means the second call falls
  // back to gMock's default action (returning 0) — presumably intentional.
  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).Times(2).WillOnce(Return(0));
  EXPECT_CALL(apm_gc_, Enable(true)).Times(2).WillOnce(Return(0));
  send_parameters_.options.tx_agc_target_dbov = 3;
  send_parameters_.options.tx_agc_digital_compression_gain = 9;
  send_parameters_.options.tx_agc_limiter = true;
  send_parameters_.options.auto_gain_control = true;
  EXPECT_CALL(apm_gc_, set_target_level_dbfs(3)).WillOnce(Return(0));
  EXPECT_CALL(apm_gc_, set_compression_gain_db(9)).WillRepeatedly(Return(0));
  EXPECT_CALL(apm_gc_, enable_limiter(true)).WillRepeatedly(Return(0));
  SetSendParameters(send_parameters_);

  // Check interaction with adjust_agc_delta. Both should be respected, for
  // backwards compatibility.
  send_parameters_.options.adjust_agc_delta = -10;
  EXPECT_CALL(apm_gc_, set_target_level_dbfs(13)).WillOnce(Return(0));
  SetSendParameters(send_parameters_);
}
+
// Enabling the audio network adaptor via send options must propagate the
// adaptor config string to the send stream.
TEST_F(WebRtcVoiceEngineTestFake, SetAudioNetworkAdaptorViaOptions) {
  EXPECT_TRUE(SetupSendStream());
  send_parameters_.options.audio_network_adaptor = true;
  send_parameters_.options.audio_network_adaptor_config = {"1234"};
  SetSendParameters(send_parameters_);
  EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
            GetAudioNetworkAdaptorConfig(kSsrcX));
}
+
// Explicitly setting audio_network_adaptor=false per-stream must clear a
// previously configured adaptor config.
TEST_F(WebRtcVoiceEngineTestFake, AudioSendResetAudioNetworkAdaptor) {
  EXPECT_TRUE(SetupSendStream());
  send_parameters_.options.audio_network_adaptor = true;
  send_parameters_.options.audio_network_adaptor_config = {"1234"};
  SetSendParameters(send_parameters_);
  EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
            GetAudioNetworkAdaptorConfig(kSsrcX));
  cricket::AudioOptions options;
  options.audio_network_adaptor = false;
  SetAudioSend(kSsrcX, true, nullptr, &options);
  EXPECT_EQ(rtc::nullopt, GetAudioNetworkAdaptorConfig(kSsrcX));
}
+
// An unset per-stream audio_network_adaptor option must leave the
// channel-level adaptor configuration untouched.
TEST_F(WebRtcVoiceEngineTestFake, AudioNetworkAdaptorNotGetOverridden) {
  EXPECT_TRUE(SetupSendStream());
  send_parameters_.options.audio_network_adaptor = true;
  send_parameters_.options.audio_network_adaptor_config = {"1234"};
  SetSendParameters(send_parameters_);
  EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
            GetAudioNetworkAdaptorConfig(kSsrcX));
  const int initial_num = call_.GetNumCreatedSendStreams();
  cricket::AudioOptions options;
  options.audio_network_adaptor = rtc::nullopt;
  // Unvalued |options.audio_network_adaptor| should not reset audio network
  // adaptor.
  SetAudioSend(kSsrcX, true, nullptr, &options);
  // AudioSendStream not expected to be recreated.
  EXPECT_EQ(initial_num, call_.GetNumCreatedSendStreams());
  EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
            GetAudioNetworkAdaptorConfig(kSsrcX));
}
+
// Fixture variant with both send-side BWE and the "BWE with overhead" field
// trials enabled, so bitrate limits include per-packet transport overhead.
class WebRtcVoiceEngineWithSendSideBweWithOverheadTest
    : public WebRtcVoiceEngineTestFake {
 public:
  WebRtcVoiceEngineWithSendSideBweWithOverheadTest()
      : WebRtcVoiceEngineTestFake(
            "WebRTC-Audio-SendSideBwe/Enabled/WebRTC-SendSideBwe-WithOverhead/"
            "Enabled/") {}
};
+
// With the overhead field trial, the stream's min/max bitrate must be the
// Opus codec limits plus the minimum transport overhead.
TEST_F(WebRtcVoiceEngineWithSendSideBweWithOverheadTest, MinAndMaxBitrate) {
  EXPECT_TRUE(SetupSendStream());
  cricket::AudioSendParameters parameters;
  parameters.codecs.push_back(kOpusCodec);
  SetSendParameters(parameters);
  const int initial_num = call_.GetNumCreatedSendStreams();
  EXPECT_EQ(initial_num, call_.GetNumCreatedSendStreams());

  // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12)
  constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12;
  // Minimum overhead rate corresponds to the largest ptime (fewest packets).
  constexpr int kOpusMaxPtimeMs = WEBRTC_OPUS_SUPPORT_120MS_PTIME ? 120 : 60;
  constexpr int kMinOverheadBps =
      kOverheadPerPacket * 8 * 1000 / kOpusMaxPtimeMs;

  constexpr int kOpusMinBitrateBps = 6000;
  EXPECT_EQ(kOpusMinBitrateBps + kMinOverheadBps,
            GetSendStreamConfig(kSsrcX).min_bitrate_bps);
  constexpr int kOpusBitrateFbBps = 32000;
  EXPECT_EQ(kOpusBitrateFbBps + kMinOverheadBps,
            GetSendStreamConfig(kSsrcX).max_bitrate_bps);

  parameters.options.audio_network_adaptor = true;
  parameters.options.audio_network_adaptor_config = {"1234"};
  SetSendParameters(parameters);

  // Intentionally identical to kMinOverheadBps: enabling ANA is not expected
  // to change the overhead assumption.
  constexpr int kMinOverheadWithAnaBps =
      kOverheadPerPacket * 8 * 1000 / kOpusMaxPtimeMs;

  EXPECT_EQ(kOpusMinBitrateBps + kMinOverheadWithAnaBps,
            GetSendStreamConfig(kSsrcX).min_bitrate_bps);

  EXPECT_EQ(kOpusBitrateFbBps + kMinOverheadWithAnaBps,
            GetSendStreamConfig(kSsrcX).max_bitrate_bps);
}
+
+// This test is similar to
+// WebRtcVoiceEngineTestFake.SetRtpSendParameterUpdatesMaxBitrate but with an
+// additional field trial.
+TEST_F(WebRtcVoiceEngineWithSendSideBweWithOverheadTest,
+       SetRtpSendParameterUpdatesMaxBitrate) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioSendParameters send_parameters;
+  send_parameters.codecs.push_back(kOpusCodec);
+  SetSendParameters(send_parameters);
+
+  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  // parameters.encodings[0].max_bitrate_bps should be unset by default.
+  EXPECT_FALSE(rtp_parameters.encodings[0].max_bitrate_bps);
+
+  constexpr int kMaxBitrateBps = 6000;
+  rtp_parameters.encodings[0].max_bitrate_bps = kMaxBitrateBps;
+  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters));
+
+  // The stream's max bitrate is the requested bitrate plus the minimum
+  // overhead rate, which depends on the supported maximum Opus ptime.
+  const int max_bitrate = GetSendStreamConfig(kSsrcX).max_bitrate_bps;
+#if WEBRTC_OPUS_SUPPORT_120MS_PTIME
+  constexpr int kMinOverhead = 3333;
+#else
+  constexpr int kMinOverhead = 6666;
+#endif
+  EXPECT_EQ(max_bitrate, kMaxBitrateBps + kMinOverhead);
+}
+
+// Test that we can set the outgoing SSRC properly.
+// SSRC is set in SetupSendStream() by calling AddSendStream.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrc) {
+  EXPECT_TRUE(SetupSendStream());
+  // The send stream must be registered in the fake Call under kSsrcX.
+  EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
+}
+
+// Verifies VoiceMediaChannel::GetStats() through the lifecycle of send and
+// receive streams: configured streams, active sending, stream removal, and an
+// unsignaled (default) receive stream created by an incoming packet.
+TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
+  // Setup. We need send codec to be set to get all stats.
+  EXPECT_TRUE(SetupSendStream());
+  // SetupSendStream adds a send stream with kSsrcX, so the receive
+  // stream has to use a different SSRC.
+  EXPECT_TRUE(AddRecvStream(kSsrcY));
+  SetSendParameters(send_parameters_);
+  EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_));
+  SetAudioSendStreamStats();
+
+  // Check stats for the added streams.
+  {
+    cricket::VoiceMediaInfo info;
+    EXPECT_TRUE(channel_->GetStats(&info));
+
+    // We have added one send stream. We should see the stats we've set.
+    EXPECT_EQ(1u, info.senders.size());
+    VerifyVoiceSenderInfo(info.senders[0], false);
+    // We have added one receive stream. We should see empty stats.
+    EXPECT_EQ(1u, info.receivers.size());
+    EXPECT_EQ(0u, info.receivers[0].ssrc());
+  }
+
+  // Start sending - this affects some reported stats.
+  {
+    cricket::VoiceMediaInfo info;
+    SetSend(true);
+    EXPECT_TRUE(channel_->GetStats(&info));
+    VerifyVoiceSenderInfo(info.senders[0], true);
+    VerifyVoiceSendRecvCodecs(info);
+  }
+
+  // Remove the kSsrcY stream. No receiver stats.
+  {
+    cricket::VoiceMediaInfo info;
+    EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY));
+    EXPECT_TRUE(channel_->GetStats(&info));
+    EXPECT_EQ(1u, info.senders.size());
+    EXPECT_EQ(0u, info.receivers.size());
+  }
+
+  // Deliver a new packet - a default receive stream should be created and we
+  // should see stats again.
+  {
+    cricket::VoiceMediaInfo info;
+    DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+    SetAudioReceiveStreamStats();
+    EXPECT_TRUE(channel_->GetStats(&info));
+    EXPECT_EQ(1u, info.senders.size());
+    EXPECT_EQ(1u, info.receivers.size());
+    VerifyVoiceReceiverInfo(info.receivers[0]);
+    VerifyVoiceSendRecvCodecs(info);
+  }
+}
+
+// Test that we can set the outgoing SSRC properly with multiple streams.
+// SSRC is set in SetupSendStream() by calling AddSendStream.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) {
+  EXPECT_TRUE(SetupSendStream());
+  EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
+  EXPECT_TRUE(AddRecvStream(kSsrcY));
+  // The receive stream must pick up the send stream's SSRC as its local SSRC.
+  EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
+}
+
+// Test that the local SSRC is the same on sending and receiving channels if the
+// receive channel is created before the send channel.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) {
+  EXPECT_TRUE(SetupChannel());
+  // Receive stream first, send stream second; the receive stream's local SSRC
+  // must still be updated to match the later-added send stream.
+  EXPECT_TRUE(AddRecvStream(kSsrcY));
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcX)));
+  EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
+  EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
+}
+
+// Test that we can properly receive packets.
+TEST_F(WebRtcVoiceEngineTestFake, Recv) {
+  EXPECT_TRUE(SetupChannel());
+  EXPECT_TRUE(AddRecvStream(1));
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+
+  // The delivered packet must have reached the stream unmodified.
+  EXPECT_TRUE(GetRecvStream(1).VerifyLastPacket(kPcmuFrame,
+                                                sizeof(kPcmuFrame)));
+}
+
+// Test that we can properly receive packets on multiple streams.
+TEST_F(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) {
+  EXPECT_TRUE(SetupChannel());
+  const uint32_t ssrc1 = 1;
+  const uint32_t ssrc2 = 2;
+  const uint32_t ssrc3 = 3;
+  EXPECT_TRUE(AddRecvStream(ssrc1));
+  EXPECT_TRUE(AddRecvStream(ssrc2));
+  EXPECT_TRUE(AddRecvStream(ssrc3));
+  // Create packets with the right SSRCs. packets[i] gets SSRC i, so
+  // packets[0] (SSRC 0) matches none of the streams above.
+  unsigned char packets[4][sizeof(kPcmuFrame)];
+  for (size_t i = 0; i < arraysize(packets); ++i) {
+    memcpy(packets[i], kPcmuFrame, sizeof(kPcmuFrame));
+    // Byte offset 8 in the RTP header is the SSRC field.
+    rtc::SetBE32(packets[i] + 8, static_cast<uint32_t>(i));
+  }
+
+  const cricket::FakeAudioReceiveStream& s1 = GetRecvStream(ssrc1);
+  const cricket::FakeAudioReceiveStream& s2 = GetRecvStream(ssrc2);
+  const cricket::FakeAudioReceiveStream& s3 = GetRecvStream(ssrc3);
+
+  EXPECT_EQ(s1.received_packets(), 0);
+  EXPECT_EQ(s2.received_packets(), 0);
+  EXPECT_EQ(s3.received_packets(), 0);
+
+  // SSRC 0 matches no stream - no counters change.
+  DeliverPacket(packets[0], sizeof(packets[0]));
+  EXPECT_EQ(s1.received_packets(), 0);
+  EXPECT_EQ(s2.received_packets(), 0);
+  EXPECT_EQ(s3.received_packets(), 0);
+
+  // Each subsequent packet is routed only to the stream with a matching SSRC.
+  DeliverPacket(packets[1], sizeof(packets[1]));
+  EXPECT_EQ(s1.received_packets(), 1);
+  EXPECT_TRUE(s1.VerifyLastPacket(packets[1], sizeof(packets[1])));
+  EXPECT_EQ(s2.received_packets(), 0);
+  EXPECT_EQ(s3.received_packets(), 0);
+
+  DeliverPacket(packets[2], sizeof(packets[2]));
+  EXPECT_EQ(s1.received_packets(), 1);
+  EXPECT_EQ(s2.received_packets(), 1);
+  EXPECT_TRUE(s2.VerifyLastPacket(packets[2], sizeof(packets[2])));
+  EXPECT_EQ(s3.received_packets(), 0);
+
+  DeliverPacket(packets[3], sizeof(packets[3]));
+  EXPECT_EQ(s1.received_packets(), 1);
+  EXPECT_EQ(s2.received_packets(), 1);
+  EXPECT_EQ(s3.received_packets(), 1);
+  EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3])));
+
+  EXPECT_TRUE(channel_->RemoveRecvStream(ssrc3));
+  EXPECT_TRUE(channel_->RemoveRecvStream(ssrc2));
+  EXPECT_TRUE(channel_->RemoveRecvStream(ssrc1));
+}
+
+// Test that receiving on an unsignaled stream works (a stream is created).
+TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignaled) {
+  EXPECT_TRUE(SetupChannel());
+  // No receive streams exist before any packet has arrived. Use unsigned
+  // literals to match size_t, consistent with the rest of this file.
+  EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size());
+
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+
+  // A default (unsignaled) receive stream is created for the packet's SSRC,
+  // and the packet is routed to it.
+  EXPECT_EQ(1u, call_.GetAudioReceiveStreams().size());
+  EXPECT_TRUE(GetRecvStream(kSsrc1).VerifyLastPacket(kPcmuFrame,
+                                                     sizeof(kPcmuFrame)));
+}
+
+// Test that receiving N unsignaled stream works (streams will be created), and
+// that packets are forwarded to them all.
+TEST_F(WebRtcVoiceEngineTestFake, RecvMultipleUnsignaled) {
+  EXPECT_TRUE(SetupChannel());
+  unsigned char packet[sizeof(kPcmuFrame)];
+  memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
+
+  // Note that SSRC = 0 is not supported.
+  for (uint32_t ssrc = 1; ssrc < (1 + kMaxUnsignaledRecvStreams); ++ssrc) {
+    // Byte offset 8 in the RTP header is the SSRC field.
+    rtc::SetBE32(&packet[8], ssrc);
+    DeliverPacket(packet, sizeof(packet));
+
+    // Verify we have one new stream for each loop iteration.
+    EXPECT_EQ(ssrc, call_.GetAudioReceiveStreams().size());
+    EXPECT_EQ(1, GetRecvStream(ssrc).received_packets());
+    EXPECT_TRUE(GetRecvStream(ssrc).VerifyLastPacket(packet, sizeof(packet)));
+  }
+
+  // Sending on the same SSRCs again should not create new streams.
+  for (uint32_t ssrc = 1; ssrc < (1 + kMaxUnsignaledRecvStreams); ++ssrc) {
+    rtc::SetBE32(&packet[8], ssrc);
+    DeliverPacket(packet, sizeof(packet));
+
+    EXPECT_EQ(kMaxUnsignaledRecvStreams, call_.GetAudioReceiveStreams().size());
+    EXPECT_EQ(2, GetRecvStream(ssrc).received_packets());
+    EXPECT_TRUE(GetRecvStream(ssrc).VerifyLastPacket(packet, sizeof(packet)));
+  }
+
+  // Send on another SSRC, the oldest unsignaled stream (SSRC=1) is replaced.
+  constexpr uint32_t kAnotherSsrc = 667;
+  rtc::SetBE32(&packet[8], kAnotherSsrc);
+  DeliverPacket(packet, sizeof(packet));
+
+  // The surviving streams are SSRC 2..kMaxUnsignaledRecvStreams (each with the
+  // 2 packets counted above), followed by the newly created kAnotherSsrc.
+  const auto& streams = call_.GetAudioReceiveStreams();
+  EXPECT_EQ(kMaxUnsignaledRecvStreams, streams.size());
+  size_t i = 0;
+  for (uint32_t ssrc = 2; ssrc < (1 + kMaxUnsignaledRecvStreams); ++ssrc, ++i) {
+    EXPECT_EQ(ssrc, streams[i]->GetConfig().rtp.remote_ssrc);
+    EXPECT_EQ(2, streams[i]->received_packets());
+  }
+  EXPECT_EQ(kAnotherSsrc, streams[i]->GetConfig().rtp.remote_ssrc);
+  EXPECT_EQ(1, streams[i]->received_packets());
+  // Sanity check that we've checked all streams.
+  EXPECT_EQ(kMaxUnsignaledRecvStreams, (i + 1));
+}
+
+// Test that a default channel is created even after a signaled stream has been
+// added, and that this stream will get any packets for unknown SSRCs.
+TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignaledAfterSignaled) {
+  EXPECT_TRUE(SetupChannel());
+  unsigned char packet[sizeof(kPcmuFrame)];
+  memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
+
+  // Add a known stream, send packet and verify we got it.
+  const uint32_t signaled_ssrc = 1;
+  rtc::SetBE32(&packet[8], signaled_ssrc);
+  EXPECT_TRUE(AddRecvStream(signaled_ssrc));
+  DeliverPacket(packet, sizeof(packet));
+  EXPECT_TRUE(GetRecvStream(signaled_ssrc).VerifyLastPacket(
+      packet, sizeof(packet)));
+  // Unsigned literals match the size_t returned by size().
+  EXPECT_EQ(1u, call_.GetAudioReceiveStreams().size());
+
+  // Note that the first unknown SSRC cannot be 0, because we only support
+  // creating receive streams for SSRC!=0.
+  const uint32_t unsignaled_ssrc = 7011;
+  rtc::SetBE32(&packet[8], unsignaled_ssrc);
+  DeliverPacket(packet, sizeof(packet));
+  EXPECT_TRUE(GetRecvStream(unsignaled_ssrc).VerifyLastPacket(
+      packet, sizeof(packet)));
+  EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size());
+
+  // A second packet for the unsignaled SSRC reuses the same default stream.
+  DeliverPacket(packet, sizeof(packet));
+  EXPECT_EQ(2, GetRecvStream(unsignaled_ssrc).received_packets());
+
+  // The signaled stream keeps receiving its own traffic unaffected.
+  rtc::SetBE32(&packet[8], signaled_ssrc);
+  DeliverPacket(packet, sizeof(packet));
+  EXPECT_EQ(2, GetRecvStream(signaled_ssrc).received_packets());
+  EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size());
+}
+
+// Two tests to verify that adding a receive stream with the same SSRC as a
+// previously added unsignaled stream will only recreate underlying stream
+// objects if the stream parameters have changed.
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_NoRecreate) {
+  EXPECT_TRUE(SetupChannel());
+
+  // Spawn unsignaled stream with SSRC=1.
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+  // Unsigned literals match the size_t returned by size().
+  EXPECT_EQ(1u, call_.GetAudioReceiveStreams().size());
+  EXPECT_TRUE(GetRecvStream(1).VerifyLastPacket(kPcmuFrame,
+                                                sizeof(kPcmuFrame)));
+
+  // Verify that the underlying stream object in Call is not recreated when a
+  // stream with SSRC=1 is added.
+  const auto& streams = call_.GetAudioReceiveStreams();
+  EXPECT_EQ(1u, streams.size());
+  int audio_receive_stream_id = streams.front()->id();
+  EXPECT_TRUE(AddRecvStream(1));
+  EXPECT_EQ(1u, streams.size());
+  EXPECT_EQ(audio_receive_stream_id, streams.front()->id());
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Recreate) {
+  EXPECT_TRUE(SetupChannel());
+
+  // Spawn unsignaled stream with SSRC=1.
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+  // Unsigned literals match the size_t returned by size().
+  EXPECT_EQ(1u, call_.GetAudioReceiveStreams().size());
+  EXPECT_TRUE(GetRecvStream(1).VerifyLastPacket(kPcmuFrame,
+                                                sizeof(kPcmuFrame)));
+
+  // Verify that the underlying stream object in Call *is* recreated when a
+  // stream with SSRC=1 is added, and which has changed stream parameters.
+  const auto& streams = call_.GetAudioReceiveStreams();
+  EXPECT_EQ(1u, streams.size());
+  int audio_receive_stream_id = streams.front()->id();
+  cricket::StreamParams stream_params;
+  stream_params.ssrcs.push_back(1);
+  stream_params.sync_label = "sync_label";
+  EXPECT_TRUE(channel_->AddRecvStream(stream_params));
+  EXPECT_EQ(1u, streams.size());
+  EXPECT_NE(audio_receive_stream_id, streams.front()->id());
+}
+
+// Test that we properly handle failures to add a receive stream.
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamFail) {
+  EXPECT_TRUE(SetupChannel());
+  // Force the fake VoE to fail channel creation, and verify the failure is
+  // surfaced through AddRecvStream.
+  voe_.set_fail_create_channel(true);
+  constexpr uint32_t kFailingSsrc = 2;
+  EXPECT_FALSE(AddRecvStream(kFailingSsrc));
+}
+
+// Test that we properly handle failures to add a send stream.
+TEST_F(WebRtcVoiceEngineTestFake, AddSendStreamFail) {
+  EXPECT_TRUE(SetupChannel());
+  // Force the fake VoE to fail channel creation, and verify the failure is
+  // surfaced through AddSendStream.
+  voe_.set_fail_create_channel(true);
+  const cricket::StreamParams params = cricket::StreamParams::CreateLegacy(2);
+  EXPECT_FALSE(channel_->AddSendStream(params));
+}
+
+// Test that AddRecvStream creates new stream.
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvStream) {
+  EXPECT_TRUE(SetupRecvStream());
+  // Remember which underlying channel the fixture's stream occupies.
+  const int channel_before = voe_.GetLastChannel();
+  EXPECT_TRUE(AddRecvStream(1));
+  // A fresh underlying channel must have been allocated for the new stream.
+  const int channel_after = voe_.GetLastChannel();
+  EXPECT_NE(channel_before, channel_after);
+}
+
+// Test that after adding a recv stream, we do not decode more codecs than
+// those previously passed into SetRecvCodecs.
+TEST_F(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::AudioRecvParameters parameters;
+  parameters.codecs.push_back(kIsacCodec);
+  parameters.codecs.push_back(kPcmuCodec);
+  EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  // The decoder map must contain exactly the two configured codecs (keyed by
+  // payload type) - nothing more.
+  EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+              (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+                  {{0, {"PCMU", 8000, 1}}, {103, {"ISAC", 16000, 1}}})));
+}
+
+// Test that we properly clean up any streams that were added, even if
+// not explicitly removed.
+TEST_F(WebRtcVoiceEngineTestFake, StreamCleanup) {
+  EXPECT_TRUE(SetupSendStream());
+  SetSendParameters(send_parameters_);
+  EXPECT_TRUE(AddRecvStream(1));
+  EXPECT_TRUE(AddRecvStream(2));
+  EXPECT_EQ(3, voe_.GetNumChannels());  // default channel + 2 added
+  // Destroying the media channel must release every underlying VoE channel.
+  delete channel_;
+  channel_ = nullptr;  // nullptr for consistency with the rest of the file.
+  EXPECT_EQ(0, voe_.GetNumChannels());
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, TestAddRecvStreamFailWithZeroSsrc) {
+  EXPECT_TRUE(SetupSendStream());
+  // SSRC 0 is not a valid receive stream identifier and must be rejected.
+  constexpr uint32_t kZeroSsrc = 0;
+  EXPECT_FALSE(AddRecvStream(kZeroSsrc));
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, TestNoLeakingWhenAddRecvStreamFail) {
+  EXPECT_TRUE(SetupChannel());
+  EXPECT_TRUE(AddRecvStream(1));
+  // Simulate a failure by deleting the channel out from under the stream.
+  const int first_channel = voe_.GetLastChannel();
+  EXPECT_EQ(0, voe_.DeleteChannel(first_channel));
+  // Adding recv stream 2 should still work, on a freshly created channel.
+  EXPECT_TRUE(AddRecvStream(2));
+  const int second_channel = voe_.GetLastChannel();
+  EXPECT_NE(first_channel, second_channel);
+  // The last created channel is deleted too.
+  EXPECT_EQ(0, voe_.DeleteChannel(second_channel));
+}
+
+// Test the InsertDtmf on the default send stream (SSRC 0) as caller, using the
+// first telephone-event codec.
+TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnDefaultSendStreamAsCaller) {
+  TestInsertDtmf(0, true, kTelephoneEventCodec1);
+}
+
+// Test the InsertDtmf on the default send stream (SSRC 0) as callee, using the
+// second telephone-event codec.
+TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnDefaultSendStreamAsCallee) {
+  TestInsertDtmf(0, false, kTelephoneEventCodec2);
+}
+
+// Test the InsertDtmf on a specified send stream (kSsrcX) as caller.
+TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStreamAsCaller) {
+  TestInsertDtmf(kSsrcX, true, kTelephoneEventCodec2);
+}
+
+// Test the InsertDtmf on a specified send stream (kSsrcX) as callee.
+TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStreamAsCallee) {
+  TestInsertDtmf(kSsrcX, false, kTelephoneEventCodec1);
+}
+
+// Exercises AudioOptions handling through SetSendParameters: echo
+// cancellation, delay-agnostic AEC, AGC, NS, typing detection, high-pass
+// filter and stereo swapping. Mock expectation counts pin down exactly how
+// many times each component is (re)configured.
+TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
+  EXPECT_TRUE(SetupSendStream());
+  // All built-in ADM effects report unavailable so the software (APM)
+  // implementations are used throughout this test.
+  EXPECT_CALL(adm_,
+              BuiltInAECIsAvailable()).Times(9).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_,
+              BuiltInAGCIsAvailable()).Times(4).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_,
+              BuiltInNSIsAvailable()).Times(2).WillRepeatedly(Return(false));
+
+  EXPECT_EQ(50, voe_.GetNetEqCapacity());
+  EXPECT_FALSE(voe_.GetNetEqFastAccelerate());
+
+  // Nothing set in AudioOptions, so everything should be as default.
+  send_parameters_.options = cricket::AudioOptions();
+  SetSendParameters(send_parameters_);
+  EXPECT_TRUE(IsHighPassFilterEnabled());
+  EXPECT_EQ(50, voe_.GetNetEqCapacity());
+  EXPECT_FALSE(voe_.GetNetEqFastAccelerate());
+
+  // Turn echo cancellation off.
+  EXPECT_CALL(apm_ec_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(false)).WillOnce(Return(0));
+  send_parameters_.options.echo_cancellation = false;
+  SetSendParameters(send_parameters_);
+
+  // Turn echo cancellation back on, with settings, and make sure
+  // nothing else changed.
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  send_parameters_.options.echo_cancellation = true;
+  SetSendParameters(send_parameters_);
+
+  // Turn on delay agnostic aec and make sure nothing change w.r.t. echo
+  // control.
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  send_parameters_.options.delay_agnostic_aec = true;
+  SetSendParameters(send_parameters_);
+
+  // Turn off echo cancellation and delay agnostic aec.
+  EXPECT_CALL(apm_ec_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(false)).WillOnce(Return(0));
+  send_parameters_.options.delay_agnostic_aec = false;
+  send_parameters_.options.extended_filter_aec = false;
+  send_parameters_.options.echo_cancellation = false;
+  SetSendParameters(send_parameters_);
+
+  // Turning delay agnostic aec back on should also turn on echo cancellation.
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  send_parameters_.options.delay_agnostic_aec = true;
+  SetSendParameters(send_parameters_);
+
+  // Turn off AGC.
+  EXPECT_CALL(adm_, SetAGC(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(false)).WillOnce(Return(0));
+  send_parameters_.options.auto_gain_control = false;
+  SetSendParameters(send_parameters_);
+
+  // Turn AGC back on.
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  send_parameters_.options.auto_gain_control = true;
+  send_parameters_.options.adjust_agc_delta = rtc::nullopt;
+  SetSendParameters(send_parameters_);
+
+  // Turn off other options (and stereo swapping on).
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_vd_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(transmit_mixer_, EnableStereoChannelSwapping(true));
+  send_parameters_.options.noise_suppression = false;
+  send_parameters_.options.highpass_filter = false;
+  send_parameters_.options.typing_detection = false;
+  send_parameters_.options.stereo_swapping = true;
+  SetSendParameters(send_parameters_);
+  EXPECT_FALSE(IsHighPassFilterEnabled());
+
+  // Set options again to ensure it has no impact.
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_vd_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(transmit_mixer_, EnableStereoChannelSwapping(true));
+  SetSendParameters(send_parameters_);
+}
+
+// Verifies that per-channel AudioOptions override each other correctly when
+// two channels are created on the same engine: each channel keeps its own
+// option set, and the APM/ADM are reconfigured whenever a channel's options
+// are applied (on SetSendParameters and on SetSend).
+TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
+  EXPECT_TRUE(SetupSendStream());
+  // Built-in ADM effects are unavailable; recording is not yet running.
+  EXPECT_CALL(adm_,
+              BuiltInAECIsAvailable()).Times(8).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_,
+              BuiltInAGCIsAvailable()).Times(8).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_,
+              BuiltInNSIsAvailable()).Times(8).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_,
+              RecordingIsInitialized()).Times(2).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_, Recording()).Times(2).WillRepeatedly(Return(false));
+  EXPECT_CALL(adm_, InitRecording()).Times(2).WillRepeatedly(Return(0));
+  // Track the APM config across the expected 10 reconfigurations.
+  webrtc::AudioProcessing::Config apm_config;
+  EXPECT_CALL(*apm_, GetConfig())
+      .Times(10)
+      .WillRepeatedly(ReturnPointee(&apm_config));
+  EXPECT_CALL(*apm_, ApplyConfig(_))
+      .Times(10)
+      .WillRepeatedly(SaveArg<0>(&apm_config));
+  EXPECT_CALL(*apm_, SetExtraOptions(testing::_)).Times(10);
+
+  std::unique_ptr<cricket::WebRtcVoiceMediaChannel> channel1(
+      static_cast<cricket::WebRtcVoiceMediaChannel*>(engine_->CreateChannel(
+          &call_, cricket::MediaConfig(), cricket::AudioOptions())));
+  std::unique_ptr<cricket::WebRtcVoiceMediaChannel> channel2(
+      static_cast<cricket::WebRtcVoiceMediaChannel*>(engine_->CreateChannel(
+          &call_, cricket::MediaConfig(), cricket::AudioOptions())));
+
+  // Have to add a stream to make SetSend work.
+  cricket::StreamParams stream1;
+  stream1.ssrcs.push_back(1);
+  channel1->AddSendStream(stream1);
+  cricket::StreamParams stream2;
+  stream2.ssrcs.push_back(2);
+  channel2->AddSendStream(stream2);
+
+  // AEC and AGC and NS enabled on both channels.
+  cricket::AudioSendParameters parameters_options_all = send_parameters_;
+  parameters_options_all.options.echo_cancellation = true;
+  parameters_options_all.options.auto_gain_control = true;
+  parameters_options_all.options.noise_suppression = true;
+  EXPECT_CALL(adm_, SetAGC(true)).Times(2).WillRepeatedly(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).Times(2).WillRepeatedly(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).Times(2).WillRepeatedly(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).Times(2).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).Times(2).WillRepeatedly(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).Times(2).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(true)).Times(2).WillRepeatedly(Return(0));
+  EXPECT_TRUE(channel1->SetSendParameters(parameters_options_all));
+  EXPECT_EQ(parameters_options_all.options, channel1->options());
+  EXPECT_TRUE(channel2->SetSendParameters(parameters_options_all));
+  EXPECT_EQ(parameters_options_all.options, channel2->options());
+
+  // unset NS on channel1 only.
+  cricket::AudioSendParameters parameters_options_no_ns = send_parameters_;
+  parameters_options_no_ns.options.noise_suppression = false;
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(false)).WillOnce(Return(0));
+  EXPECT_TRUE(channel1->SetSendParameters(parameters_options_no_ns));
+  cricket::AudioOptions expected_options = parameters_options_all.options;
+  expected_options.echo_cancellation = true;
+  expected_options.auto_gain_control = true;
+  expected_options.noise_suppression = false;
+  EXPECT_EQ(expected_options, channel1->options());
+
+  // unset AGC on channel2 only.
+  cricket::AudioSendParameters parameters_options_no_agc = send_parameters_;
+  parameters_options_no_agc.options.auto_gain_control = false;
+  EXPECT_CALL(adm_, SetAGC(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(true)).WillOnce(Return(0));
+  EXPECT_TRUE(channel2->SetSendParameters(parameters_options_no_agc));
+  expected_options.echo_cancellation = true;
+  expected_options.auto_gain_control = false;
+  expected_options.noise_suppression = true;
+  EXPECT_EQ(expected_options, channel2->options());
+
+  // Applying all-options on the fixture's default channel re-enables all.
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(true)).WillOnce(Return(0));
+  EXPECT_TRUE(channel_->SetSendParameters(parameters_options_all));
+
+  // SetSend(true) re-applies channel1's options (NS off).
+  EXPECT_CALL(adm_, SetAGC(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(false)).WillOnce(Return(0));
+  channel1->SetSend(true);
+
+  // SetSend(true) re-applies channel2's options (AGC off).
+  EXPECT_CALL(adm_, SetAGC(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(true)).WillOnce(Return(0));
+  channel2->SetSend(true);
+
+  // Make sure settings take effect while we are sending.
+  cricket::AudioSendParameters parameters_options_no_agc_nor_ns =
+      send_parameters_;
+  parameters_options_no_agc_nor_ns.options.auto_gain_control = false;
+  parameters_options_no_agc_nor_ns.options.noise_suppression = false;
+  EXPECT_CALL(adm_, SetAGC(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, Enable(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ec_, enable_metrics(true)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, set_mode(kDefaultAgcMode)).WillOnce(Return(0));
+  EXPECT_CALL(apm_gc_, Enable(false)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, set_level(kDefaultNsLevel)).WillOnce(Return(0));
+  EXPECT_CALL(apm_ns_, Enable(false)).WillOnce(Return(0));
+  EXPECT_TRUE(channel2->SetSendParameters(parameters_options_no_agc_nor_ns));
+  expected_options.echo_cancellation = true;
+  expected_options.auto_gain_control = false;
+  expected_options.noise_suppression = false;
+  EXPECT_EQ(expected_options, channel2->options());
+}
+
+// This test verifies DSCP settings are properly applied on voice media channel.
+TEST_F(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
+  EXPECT_TRUE(SetupSendStream());
+  cricket::FakeNetworkInterface network_interface;
+  cricket::MediaConfig config;
+  std::unique_ptr<cricket::VoiceMediaChannel> channel;
+
+  // Three channels are created below, hence three APM reconfigurations.
+  webrtc::AudioProcessing::Config apm_config;
+  EXPECT_CALL(*apm_, GetConfig())
+      .Times(3)
+      .WillRepeatedly(ReturnPointee(&apm_config));
+  EXPECT_CALL(*apm_, ApplyConfig(_))
+      .Times(3)
+      .WillRepeatedly(SaveArg<0>(&apm_config));
+  EXPECT_CALL(*apm_, SetExtraOptions(testing::_)).Times(3);
+
+  channel.reset(
+      engine_->CreateChannel(&call_, config, cricket::AudioOptions()));
+  channel->SetInterface(&network_interface);
+  // Default value when DSCP is disabled should be DSCP_DEFAULT.
+  EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
+
+  // With DSCP enabled, voice should be marked Expedited Forwarding.
+  config.enable_dscp = true;
+  channel.reset(
+      engine_->CreateChannel(&call_, config, cricket::AudioOptions()));
+  channel->SetInterface(&network_interface);
+  EXPECT_EQ(rtc::DSCP_EF, network_interface.dscp());
+
+  // Verify that setting the option to false resets the
+  // DiffServCodePoint.
+  config.enable_dscp = false;
+  channel.reset(
+      engine_->CreateChannel(&call_, config, cricket::AudioOptions()));
+  channel->SetInterface(&network_interface);
+  // Default value when DSCP is disabled should be DSCP_DEFAULT.
+  EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
+
+  // Detach the interface before the local network_interface goes out of scope.
+  channel->SetInterface(nullptr);
+}
+
+// GetReceiveChannelId() must return -1 for unknown SSRCs and the underlying
+// VoE channel id for each added receive stream.
+TEST_F(WebRtcVoiceEngineTestFake, TestGetReceiveChannelId) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::WebRtcVoiceMediaChannel* media_channel =
+      static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
+  // No streams yet: every lookup fails.
+  EXPECT_EQ(-1, media_channel->GetReceiveChannelId(0));
+  EXPECT_TRUE(AddRecvStream(kSsrcX));
+  const int first_id = voe_.GetLastChannel();
+  EXPECT_EQ(first_id, media_channel->GetReceiveChannelId(kSsrcX));
+  // kSsrcY is still unknown until its stream is added.
+  EXPECT_EQ(-1, media_channel->GetReceiveChannelId(kSsrcY));
+  EXPECT_TRUE(AddRecvStream(kSsrcY));
+  const int second_id = voe_.GetLastChannel();
+  EXPECT_EQ(second_id, media_channel->GetReceiveChannelId(kSsrcY));
+}
+
+// GetSendChannelId() must return -1 for unknown SSRCs and the underlying VoE
+// channel id for each added send stream.
+TEST_F(WebRtcVoiceEngineTestFake, TestGetSendChannelId) {
+  EXPECT_TRUE(SetupChannel());
+  cricket::WebRtcVoiceMediaChannel* media_channel =
+      static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
+  // No streams yet: every lookup fails.
+  EXPECT_EQ(-1, media_channel->GetSendChannelId(0));
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcX)));
+  const int first_id = voe_.GetLastChannel();
+  EXPECT_EQ(first_id, media_channel->GetSendChannelId(kSsrcX));
+  // kSsrcY is still unknown until its stream is added.
+  EXPECT_EQ(-1, media_channel->GetSendChannelId(kSsrcY));
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcY)));
+  const int second_id = voe_.GetLastChannel();
+  EXPECT_EQ(second_id, media_channel->GetSendChannelId(kSsrcY));
+}
+
+// SetOutputVolume() fails for unknown SSRCs and adjusts the receive stream's
+// gain once the stream exists (default gain is 1.0).
+TEST_F(WebRtcVoiceEngineTestFake, SetOutputVolume) {
+  EXPECT_TRUE(SetupChannel());
+  // No stream with kSsrcY yet - the call must fail.
+  EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5));
+  cricket::StreamParams recv_params;
+  recv_params.ssrcs.push_back(kSsrcY);
+  EXPECT_TRUE(channel_->AddRecvStream(recv_params));
+  // Default gain is unity.
+  EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain());
+  // A new volume is applied directly to the underlying stream's gain.
+  EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3));
+  EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain());
+}
+
+// Verifies that SetOutputVolume with SSRC=0 applies to existing unsignaled
+// streams and is remembered for unsignaled streams created later, while a
+// per-SSRC volume affects only that stream.
+TEST_F(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) {
+  EXPECT_TRUE(SetupChannel());
+
+  // Spawn an unsignaled stream by sending a packet - gain should be 1.
+  DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+  EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrc1).gain());
+
+  // Should remember the volume "2" which will be set on new unsignaled streams,
+  // and also set the gain to 2 on existing unsignaled streams.
+  EXPECT_TRUE(channel_->SetOutputVolume(kSsrc0, 2));
+  EXPECT_DOUBLE_EQ(2, GetRecvStream(kSsrc1).gain());
+
+  // Spawn an unsignaled stream by sending a packet - gain should be 2.
+  unsigned char pcmuFrame2[sizeof(kPcmuFrame)];
+  memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame));
+  // Byte offset 8 in the RTP header is the SSRC field.
+  rtc::SetBE32(&pcmuFrame2[8], kSsrcX);
+  DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2));
+  EXPECT_DOUBLE_EQ(2, GetRecvStream(kSsrcX).gain());
+
+  // Setting gain with SSRC=0 should affect all unsignaled streams.
+  EXPECT_TRUE(channel_->SetOutputVolume(kSsrc0, 3));
+  if (kMaxUnsignaledRecvStreams > 1) {
+    EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrc1).gain());
+  }
+  EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcX).gain());
+
+  // Setting gain on an individual stream affects only that.
+  EXPECT_TRUE(channel_->SetOutputVolume(kSsrcX, 4));
+  if (kMaxUnsignaledRecvStreams > 1) {
+    EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrc1).gain());
+  }
+  EXPECT_DOUBLE_EQ(4, GetRecvStream(kSsrcX).gain());
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SetsSyncGroupFromSyncLabel) {
+ const uint32_t kAudioSsrc = 123;
+ const std::string kSyncLabel = "AvSyncLabel";
+
+ EXPECT_TRUE(SetupSendStream());
+ cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kAudioSsrc);
+ sp.sync_label = kSyncLabel;
+ // Creating two channels to make sure that sync label is set properly for both
+ // the default voice channel and following ones.
+ EXPECT_TRUE(channel_->AddRecvStream(sp));
+ sp.ssrcs[0] += 1;
+ EXPECT_TRUE(channel_->AddRecvStream(sp));
+
+ ASSERT_EQ(2, call_.GetAudioReceiveStreams().size());
+ EXPECT_EQ(kSyncLabel,
+ call_.GetAudioReceiveStream(kAudioSsrc)->GetConfig().sync_group)
+ << "SyncGroup should be set based on sync_label";
+ EXPECT_EQ(kSyncLabel,
+ call_.GetAudioReceiveStream(kAudioSsrc + 1)->GetConfig().sync_group)
+ << "SyncGroup should be set based on sync_label";
+}
+
+// TODO(solenberg): Remove, once recv streams are configured through Call.
+// (This is then covered by TestSetRecvRtpHeaderExtensions.)
+TEST_F(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) {
+ // Test that setting the header extensions results in the expected state
+ // changes on an associated Call.
+ std::vector<uint32_t> ssrcs;
+ ssrcs.push_back(223);
+ ssrcs.push_back(224);
+
+ EXPECT_TRUE(SetupSendStream());
+ SetSendParameters(send_parameters_);
+ for (uint32_t ssrc : ssrcs) {
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(ssrc)));
+ }
+
+ EXPECT_EQ(2, call_.GetAudioReceiveStreams().size());
+ for (uint32_t ssrc : ssrcs) {
+ const auto* s = call_.GetAudioReceiveStream(ssrc);
+ EXPECT_NE(nullptr, s);
+ EXPECT_EQ(0, s->GetConfig().rtp.extensions.size());
+ }
+
+ // Set up receive extensions.
+ cricket::RtpCapabilities capabilities = engine_->GetCapabilities();
+ cricket::AudioRecvParameters recv_parameters;
+ recv_parameters.extensions = capabilities.header_extensions;
+ channel_->SetRecvParameters(recv_parameters);
+ EXPECT_EQ(2, call_.GetAudioReceiveStreams().size());
+ for (uint32_t ssrc : ssrcs) {
+ const auto* s = call_.GetAudioReceiveStream(ssrc);
+ EXPECT_NE(nullptr, s);
+ const auto& s_exts = s->GetConfig().rtp.extensions;
+ EXPECT_EQ(capabilities.header_extensions.size(), s_exts.size());
+ for (const auto& e_ext : capabilities.header_extensions) {
+ for (const auto& s_ext : s_exts) {
+ if (e_ext.id == s_ext.id) {
+ EXPECT_EQ(e_ext.uri, s_ext.uri);
+ }
+ }
+ }
+ }
+
+ // Disable receive extensions.
+ channel_->SetRecvParameters(cricket::AudioRecvParameters());
+ for (uint32_t ssrc : ssrcs) {
+ const auto* s = call_.GetAudioReceiveStream(ssrc);
+ EXPECT_NE(nullptr, s);
+ EXPECT_EQ(0, s->GetConfig().rtp.extensions.size());
+ }
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) {
+ // Test that packets are forwarded to the Call when configured accordingly.
+ const uint32_t kAudioSsrc = 1;
+ rtc::CopyOnWriteBuffer kPcmuPacket(kPcmuFrame, sizeof(kPcmuFrame));
+ static const unsigned char kRtcp[] = {
+ 0x80, 0xc9, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02,
+ 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+ };
+ rtc::CopyOnWriteBuffer kRtcpPacket(kRtcp, sizeof(kRtcp));
+
+ EXPECT_TRUE(SetupSendStream());
+ cricket::WebRtcVoiceMediaChannel* media_channel =
+ static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
+ SetSendParameters(send_parameters_);
+ EXPECT_TRUE(media_channel->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kAudioSsrc)));
+
+ EXPECT_EQ(1, call_.GetAudioReceiveStreams().size());
+ const cricket::FakeAudioReceiveStream* s =
+ call_.GetAudioReceiveStream(kAudioSsrc);
+ EXPECT_EQ(0, s->received_packets());
+ channel_->OnPacketReceived(&kPcmuPacket, rtc::PacketTime());
+ EXPECT_EQ(1, s->received_packets());
+ channel_->OnRtcpReceived(&kRtcpPacket, rtc::PacketTime());
+ EXPECT_EQ(2, s->received_packets());
+}
+
+// All receive channels should be associated with the first send channel,
+// since they do not send RTCP SR.
+TEST_F(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) {
+ EXPECT_TRUE(SetupSendStream());
+ EXPECT_TRUE(AddRecvStream(kSsrcY));
+ EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcZ)));
+ EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
+ EXPECT_TRUE(AddRecvStream(kSsrcW));
+ EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) {
+ EXPECT_TRUE(SetupRecvStream());
+ EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcY)));
+ EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
+ EXPECT_TRUE(AddRecvStream(kSsrcZ));
+ EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc);
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcW)));
+ EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
+ EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SetRawAudioSink) {
+ EXPECT_TRUE(SetupChannel());
+ std::unique_ptr<FakeAudioSink> fake_sink_1(new FakeAudioSink());
+ std::unique_ptr<FakeAudioSink> fake_sink_2(new FakeAudioSink());
+
+ // Setting the sink before a recv stream exists should do nothing.
+ channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_1));
+ EXPECT_TRUE(AddRecvStream(kSsrcX));
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink());
+
+ // Now try actually setting the sink.
+ channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_2));
+ EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink());
+
+ // Now try resetting it.
+ channel_->SetRawAudioSink(kSsrcX, nullptr);
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink());
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) {
+ EXPECT_TRUE(SetupChannel());
+ std::unique_ptr<FakeAudioSink> fake_sink_1(new FakeAudioSink());
+ std::unique_ptr<FakeAudioSink> fake_sink_2(new FakeAudioSink());
+ std::unique_ptr<FakeAudioSink> fake_sink_3(new FakeAudioSink());
+ std::unique_ptr<FakeAudioSink> fake_sink_4(new FakeAudioSink());
+
+ // Should be able to set a default sink even when no stream exists.
+ channel_->SetRawAudioSink(0, std::move(fake_sink_1));
+
+ // Spawn an unsignaled stream by sending a packet - it should be assigned the
+ // default sink.
+ DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+ EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
+
+ // Try resetting the default sink.
+ channel_->SetRawAudioSink(kSsrc0, nullptr);
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink());
+
+ // Try setting the default sink while the default stream exists.
+ channel_->SetRawAudioSink(kSsrc0, std::move(fake_sink_2));
+ EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
+
+ // If we remove and add a default stream, it should get the same sink.
+ EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1));
+ DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
+ EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
+
+ // Spawn another unsignaled stream - it should be assigned the default sink
+ // and the previous unsignaled stream should lose it.
+ unsigned char pcmuFrame2[sizeof(kPcmuFrame)];
+ memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame));
+ rtc::SetBE32(&pcmuFrame2[8], kSsrcX);
+ DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2));
+ if (kMaxUnsignaledRecvStreams > 1) {
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink());
+ }
+ EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink());
+
+ // Reset the default sink - the second unsignaled stream should lose it.
+ channel_->SetRawAudioSink(kSsrc0, nullptr);
+ if (kMaxUnsignaledRecvStreams > 1) {
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink());
+ }
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink());
+
+ // Try setting the default sink while two streams exists.
+ channel_->SetRawAudioSink(kSsrc0, std::move(fake_sink_3));
+ if (kMaxUnsignaledRecvStreams > 1) {
+ EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink());
+ }
+ EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink());
+
+ // Try setting the sink for the first unsignaled stream using its known SSRC.
+ channel_->SetRawAudioSink(kSsrc1, std::move(fake_sink_4));
+ if (kMaxUnsignaledRecvStreams > 1) {
+ EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
+ }
+ EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink());
+ if (kMaxUnsignaledRecvStreams > 1) {
+ EXPECT_NE(GetRecvStream(kSsrc1).sink(), GetRecvStream(kSsrcX).sink());
+ }
+}
+
+// Test that, just like the video channel, the voice channel communicates the
+// network state to the call.
+TEST_F(WebRtcVoiceEngineTestFake, OnReadyToSendSignalsNetworkState) {
+ EXPECT_TRUE(SetupChannel());
+
+ EXPECT_EQ(webrtc::kNetworkUp,
+ call_.GetNetworkState(webrtc::MediaType::AUDIO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ call_.GetNetworkState(webrtc::MediaType::VIDEO));
+
+ channel_->OnReadyToSend(false);
+ EXPECT_EQ(webrtc::kNetworkDown,
+ call_.GetNetworkState(webrtc::MediaType::AUDIO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ call_.GetNetworkState(webrtc::MediaType::VIDEO));
+
+ channel_->OnReadyToSend(true);
+ EXPECT_EQ(webrtc::kNetworkUp,
+ call_.GetNetworkState(webrtc::MediaType::AUDIO));
+ EXPECT_EQ(webrtc::kNetworkUp,
+ call_.GetNetworkState(webrtc::MediaType::VIDEO));
+}
+
+// Test that playout is still started after changing parameters
+TEST_F(WebRtcVoiceEngineTestFake, PreservePlayoutWhenRecreateRecvStream) {
+ SetupRecvStream();
+ channel_->SetPlayout(true);
+ EXPECT_TRUE(GetRecvStream(kSsrcX).started());
+
+ // Changing RTP header extensions will recreate the AudioReceiveStream.
+ cricket::AudioRecvParameters parameters;
+ parameters.extensions.push_back(
+ webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 12));
+ channel_->SetRecvParameters(parameters);
+
+ EXPECT_TRUE(GetRecvStream(kSsrcX).started());
+}
+
+// Tests when GetSources is called with non-existing ssrc, it will return an
+// empty list of RtpSource without crashing.
+TEST_F(WebRtcVoiceEngineTestFake, GetSourcesWithNonExistingSsrc) {
+ // Setup an recv stream with |kSsrcX|.
+ SetupRecvStream();
+ cricket::WebRtcVoiceMediaChannel* media_channel =
+ static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
+ // Call GetSources with |kSsrcY| which doesn't exist.
+ std::vector<webrtc::RtpSource> sources = media_channel->GetSources(kSsrcY);
+ EXPECT_EQ(0u, sources.size());
+}
+
+// Tests that the library initializes and shuts down properly.
+TEST(WebRtcVoiceEngineTest, StartupShutdown) {
+ // If the VoiceEngine wants to gather available codecs early, that's fine but
+ // we never want it to create a decoder at this stage.
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
+ engine.Init();
+ webrtc::RtcEventLogNullImpl event_log;
+ std::unique_ptr<webrtc::Call> call(
+ webrtc::Call::Create(webrtc::Call::Config(&event_log)));
+ cricket::VoiceMediaChannel* channel = engine.CreateChannel(
+ call.get(), cricket::MediaConfig(), cricket::AudioOptions());
+ EXPECT_TRUE(channel != nullptr);
+ delete channel;
+}
+
+// Tests that reference counting on the external ADM is correct.
+TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) {
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ EXPECT_CALL(adm, AddRef()).Times(3);
+ EXPECT_CALL(adm, Release())
+ .Times(3)
+ .WillRepeatedly(Return(rtc::RefCountReleaseStatus::kDroppedLastRef));
+ {
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
+ engine.Init();
+ webrtc::RtcEventLogNullImpl event_log;
+ std::unique_ptr<webrtc::Call> call(
+ webrtc::Call::Create(webrtc::Call::Config(&event_log)));
+ cricket::VoiceMediaChannel* channel = engine.CreateChannel(
+ call.get(), cricket::MediaConfig(), cricket::AudioOptions());
+ EXPECT_TRUE(channel != nullptr);
+ delete channel;
+ }
+}
+
+// Verify the payload id of common audio codecs, including CN, ISAC, and G722.
+TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) {
+ // TODO(ossu): Why are the payload types of codecs with non-static payload
+ // type assignments checked here? It shouldn't really matter.
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
+ engine.Init();
+ for (const cricket::AudioCodec& codec : engine.send_codecs()) {
+ auto is_codec = [&codec](const char* name, int clockrate = 0) {
+ return STR_CASE_CMP(codec.name.c_str(), name) == 0 &&
+ (clockrate == 0 || codec.clockrate == clockrate);
+ };
+ if (is_codec("CN", 16000)) {
+ EXPECT_EQ(105, codec.id);
+ } else if (is_codec("CN", 32000)) {
+ EXPECT_EQ(106, codec.id);
+ } else if (is_codec("ISAC", 16000)) {
+ EXPECT_EQ(103, codec.id);
+ } else if (is_codec("ISAC", 32000)) {
+ EXPECT_EQ(104, codec.id);
+ } else if (is_codec("G722", 8000)) {
+ EXPECT_EQ(9, codec.id);
+ } else if (is_codec("telephone-event", 8000)) {
+ EXPECT_EQ(126, codec.id);
+ // TODO(solenberg): 16k, 32k, 48k DTMF should be dynamically assigned.
+ // Remove these checks once both send and receive side assigns payload types
+ // dynamically.
+ } else if (is_codec("telephone-event", 16000)) {
+ EXPECT_EQ(113, codec.id);
+ } else if (is_codec("telephone-event", 32000)) {
+ EXPECT_EQ(112, codec.id);
+ } else if (is_codec("telephone-event", 48000)) {
+ EXPECT_EQ(110, codec.id);
+ } else if (is_codec("opus")) {
+ EXPECT_EQ(111, codec.id);
+ ASSERT_TRUE(codec.params.find("minptime") != codec.params.end());
+ EXPECT_EQ("10", codec.params.find("minptime")->second);
+ ASSERT_TRUE(codec.params.find("useinbandfec") != codec.params.end());
+ EXPECT_EQ("1", codec.params.find("useinbandfec")->second);
+ }
+ }
+}
+
+// Tests that VoE supports at least 32 channels
+TEST(WebRtcVoiceEngineTest, Has32Channels) {
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm);
+ engine.Init();
+ webrtc::RtcEventLogNullImpl event_log;
+ std::unique_ptr<webrtc::Call> call(
+ webrtc::Call::Create(webrtc::Call::Config(&event_log)));
+
+ cricket::VoiceMediaChannel* channels[32];
+ int num_channels = 0;
+ while (num_channels < arraysize(channels)) {
+ cricket::VoiceMediaChannel* channel = engine.CreateChannel(
+ call.get(), cricket::MediaConfig(), cricket::AudioOptions());
+ if (!channel)
+ break;
+ channels[num_channels++] = channel;
+ }
+
+ int expected = arraysize(channels);
+ EXPECT_EQ(expected, num_channels);
+
+ while (num_channels > 0) {
+ delete channels[--num_channels];
+ }
+}
+
+// Test that we set our preferred codecs properly.
+TEST(WebRtcVoiceEngineTest, SetRecvCodecs) {
+ // TODO(ossu): I'm not sure of the intent of this test. It's either:
+ // - Check that our builtin codecs are usable by Channel.
+ // - The codecs provided by the engine is usable by Channel.
+ // It does not check that the codecs in the RecvParameters are actually
+ // what we sent in - though it's probably reasonable to expect so, if
+ // SetRecvParameters returns true.
+ // I think it will become clear once audio decoder injection is completed.
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(
+ &adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm);
+ engine.Init();
+ webrtc::RtcEventLogNullImpl event_log;
+ std::unique_ptr<webrtc::Call> call(
+ webrtc::Call::Create(webrtc::Call::Config(&event_log)));
+ cricket::WebRtcVoiceMediaChannel channel(&engine, cricket::MediaConfig(),
+ cricket::AudioOptions(), call.get());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs = engine.recv_codecs();
+ EXPECT_TRUE(channel.SetRecvParameters(parameters));
+}
+
+TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) {
+ std::vector<webrtc::AudioCodecSpec> specs;
+ webrtc::AudioCodecSpec spec1{{"codec1", 48000, 2, {{"param1", "value1"}}},
+ {48000, 2, 16000, 10000, 20000}};
+ spec1.info.allow_comfort_noise = false;
+ spec1.info.supports_network_adaption = true;
+ specs.push_back(spec1);
+ webrtc::AudioCodecSpec spec2{{"codec2", 32000, 1}, {32000, 1, 32000}};
+ spec2.info.allow_comfort_noise = false;
+ specs.push_back(spec2);
+ specs.push_back(webrtc::AudioCodecSpec{
+ {"codec3", 16000, 1, {{"param1", "value1b"}, {"param2", "value2"}}},
+ {16000, 1, 13300}});
+ specs.push_back(
+ webrtc::AudioCodecSpec{{"codec4", 8000, 1}, {8000, 1, 64000}});
+ specs.push_back(
+ webrtc::AudioCodecSpec{{"codec5", 8000, 2}, {8000, 1, 64000}});
+
+ rtc::scoped_refptr<webrtc::MockAudioEncoderFactory> unused_encoder_factory =
+ webrtc::MockAudioEncoderFactory::CreateUnusedFactory();
+ rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> mock_decoder_factory =
+ new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>;
+ EXPECT_CALL(*mock_decoder_factory.get(), GetSupportedDecoders())
+ .WillOnce(Return(specs));
+ testing::NiceMock<webrtc::test::MockAudioDeviceModule> adm;
+
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ webrtc::AudioProcessing::Create();
+ cricket::WebRtcVoiceEngine engine(&adm, unused_encoder_factory,
+ mock_decoder_factory, nullptr, apm);
+ engine.Init();
+ auto codecs = engine.recv_codecs();
+ EXPECT_EQ(11, codecs.size());
+
+ // Rather than just ASSERTing that there are enough codecs, ensure that we can
+ // check the actual values safely, to provide better test results.
+ auto get_codec =
+ [&codecs](size_t index) -> const cricket::AudioCodec& {
+ static const cricket::AudioCodec missing_codec(0, "<missing>", 0, 0, 0);
+ if (codecs.size() > index)
+ return codecs[index];
+ return missing_codec;
+ };
+
+ // Ensure the general codecs are generated first and in order.
+ for (size_t i = 0; i != specs.size(); ++i) {
+ EXPECT_EQ(specs[i].format.name, get_codec(i).name);
+ EXPECT_EQ(specs[i].format.clockrate_hz, get_codec(i).clockrate);
+ EXPECT_EQ(specs[i].format.num_channels, get_codec(i).channels);
+ EXPECT_EQ(specs[i].format.parameters, get_codec(i).params);
+ }
+
+ // Find the index of a codec, or -1 if not found, so that we can easily check
+ // supplementary codecs are ordered after the general codecs.
+ auto find_codec =
+ [&codecs](const webrtc::SdpAudioFormat& format) -> int {
+ for (size_t i = 0; i != codecs.size(); ++i) {
+ const cricket::AudioCodec& codec = codecs[i];
+ if (STR_CASE_CMP(codec.name.c_str(), format.name.c_str()) == 0 &&
+ codec.clockrate == format.clockrate_hz &&
+ codec.channels == format.num_channels) {
+ return rtc::checked_cast<int>(i);
+ }
+ }
+ return -1;
+ };
+
+ // Ensure all supplementary codecs are generated last. Their internal ordering
+ // is not important.
+ // Without this cast, the comparison turned unsigned and, thus, failed for -1.
+ const int num_specs = static_cast<int>(specs.size());
+ EXPECT_GE(find_codec({"cn", 8000, 1}), num_specs);
+ EXPECT_GE(find_codec({"cn", 16000, 1}), num_specs);
+ EXPECT_EQ(find_codec({"cn", 32000, 1}), -1);
+ EXPECT_GE(find_codec({"telephone-event", 8000, 1}), num_specs);
+ EXPECT_GE(find_codec({"telephone-event", 16000, 1}), num_specs);
+ EXPECT_GE(find_codec({"telephone-event", 32000, 1}), num_specs);
+ EXPECT_GE(find_codec({"telephone-event", 48000, 1}), num_specs);
+}
diff --git a/third_party/libwebrtc/webrtc/media/rtc_audio_video_gn/moz.build b/third_party/libwebrtc/webrtc/media/rtc_audio_video_gn/moz.build
new file mode 100644
index 0000000000..721b4799e9
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/rtc_audio_video_gn/moz.build
@@ -0,0 +1,257 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["CHROMIUM_BUILD"] = True
+DEFINES["HAVE_WEBRTC_VIDEO"] = True
+DEFINES["HAVE_WEBRTC_VOICE"] = True
+DEFINES["V8_DEPRECATION_WARNINGS"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_INTELLIGIBILITY_ENHANCER"] = "0"
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_OPUS_SUPPORT_120MS_PTIME"] = "1"
+DEFINES["WEBRTC_RESTRICT_LOGGING"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/ipc/glue",
+ "/media/libyuv/libyuv/include/",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/webrtc/",
+ "/third_party/libwebrtc/webrtc/common_video/include/",
+ "/third_party/libwebrtc/webrtc/modules/audio_coding/include/",
+ "/third_party/libwebrtc/webrtc/modules/audio_device/dummy/",
+ "/third_party/libwebrtc/webrtc/modules/audio_device/include/",
+ "/third_party/libwebrtc/webrtc/modules/include/",
+ "/third_party/libwebrtc/webrtc/modules/include/"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/webrtc/media/engine/adm_helpers.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/apm_helpers.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/constants.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/convert_legacy_video_factory.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/encoder_simulcast_proxy.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/internaldecoderfactory.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/internalencoderfactory.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/payload_type_mapper.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/scopedvideodecoder.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/scopedvideoencoder.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/simulcast.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/simulcast_encoder_adapter.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/videodecodersoftwarefallbackwrapper.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/videoencodersoftwarefallbackwrapper.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/webrtcvideodecoderfactory.cc",
+ "/third_party/libwebrtc/webrtc/media/engine/webrtcvideoencoderfactory.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["WTF_USE_DYNAMIC_ANNOTATIONS"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION"] = "r12b"
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["USE_OPENSSL_CERTS"] = "1"
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["__GNU_SOURCE"] = "1"
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE"] = "0"
+
+ OS_LIBS += [
+ "-framework AVFoundation",
+ "-framework CoreMedia",
+ "-framework CoreVideo",
+ "-framework Foundation"
+ ]
+
+if CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+ OS_LIBS += [
+ "m",
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "1"
+ DEFINES["UNICODE"] = True
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_CRT_SECURE_NO_WARNINGS"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_USING_V110_SDK71_"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "Strmiids",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0120"
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0920"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["NO_TCMALLOC"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "NetBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+Library("rtc_audio_video_gn")
diff --git a/third_party/libwebrtc/webrtc/media/rtc_h264_profile_id_gn/moz.build b/third_party/libwebrtc/webrtc/media/rtc_h264_profile_id_gn/moz.build
new file mode 100644
index 0000000000..5a32e0fad1
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/rtc_h264_profile_id_gn/moz.build
@@ -0,0 +1,217 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["CHROMIUM_BUILD"] = True
+DEFINES["V8_DEPRECATION_WARNINGS"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_RESTRICT_LOGGING"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/ipc/glue",
+ "/third_party/libwebrtc/webrtc/"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["WTF_USE_DYNAMIC_ANNOTATIONS"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION"] = "r12b"
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["USE_OPENSSL_CERTS"] = "1"
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["__GNU_SOURCE"] = "1"
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE"] = "0"
+
+ OS_LIBS += [
+ "-framework Foundation"
+ ]
+
+if CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "1"
+ DEFINES["UNICODE"] = True
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_CRT_SECURE_NO_WARNINGS"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_USING_V110_SDK71_"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0120"
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0920"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["NO_TCMALLOC"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "NetBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+Library("rtc_h264_profile_id_gn")
diff --git a/third_party/libwebrtc/webrtc/media/rtc_media_base_gn/moz.build b/third_party/libwebrtc/webrtc/media/rtc_media_base_gn/moz.build
new file mode 100644
index 0000000000..efd275fb7f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/rtc_media_base_gn/moz.build
@@ -0,0 +1,236 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["CHROMIUM_BUILD"] = True
+DEFINES["V8_DEPRECATION_WARNINGS"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_RESTRICT_LOGGING"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/ipc/glue",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/webrtc/"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc",
+ "/third_party/libwebrtc/webrtc/media/base/codec.cc",
+ "/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc",
+ "/third_party/libwebrtc/webrtc/media/base/mediaengine.cc",
+ "/third_party/libwebrtc/webrtc/media/base/streamparams.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videoadapter.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videocapturer.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videocommon.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc",
+ "/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["WTF_USE_DYNAMIC_ANNOTATIONS"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION"] = "r12b"
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["USE_OPENSSL_CERTS"] = "1"
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["__GNU_SOURCE"] = "1"
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE"] = "0"
+
+ OS_LIBS += [
+ "-framework Foundation"
+ ]
+
+if CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "1"
+ DEFINES["UNICODE"] = True
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_CRT_SECURE_NO_WARNINGS"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_USING_V110_SDK71_"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0120"
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0920"
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["NO_TCMALLOC"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "NetBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+Library("rtc_media_base_gn")
diff --git a/third_party/libwebrtc/webrtc/media/rtc_media_gn/moz.build b/third_party/libwebrtc/webrtc/media/rtc_media_gn/moz.build
new file mode 100644
index 0000000000..4c1156d5a4
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/rtc_media_gn/moz.build
@@ -0,0 +1,194 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["CHROMIUM_BUILD"] = True
+DEFINES["HAVE_WEBRTC_VIDEO"] = True
+DEFINES["HAVE_WEBRTC_VOICE"] = True
+DEFINES["V8_DEPRECATION_WARNINGS"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_RESTRICT_LOGGING"] = True
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ "/ipc/glue",
+ "/third_party/libwebrtc/webrtc/"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["WTF_USE_DYNAMIC_ANNOTATIONS"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION"] = "r12b"
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["USE_OPENSSL_CERTS"] = "1"
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["__GNU_SOURCE"] = "1"
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE"] = "0"
+
+ OS_LIBS += [
+ "-framework AVFoundation",
+ "-framework CoreMedia",
+ "-framework CoreVideo",
+ "-framework Foundation"
+ ]
+
+if CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+ OS_LIBS += [
+ "m",
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NO_TCMALLOC"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "1"
+ DEFINES["UNICODE"] = True
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_CRT_SECURE_NO_WARNINGS"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_USING_V110_SDK71_"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "Strmiids",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "DragonFly":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "FreeBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "NetBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if not CONFIG["MOZ_DEBUG"] and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_FORTIFY_SOURCE"] = "2"
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0120"
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["CR_XCODE_VERSION"] = "0920"
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["DISABLE_NACL"] = True
+ DEFINES["NO_TCMALLOC"] = True
+
+Library("rtc_media_gn")